pax_global_header00006660000000000000000000000064150611624260014515gustar00rootroot0000000000000052 comment=4d4cb0ab842247caea1963132c242c62aab1e4d5 python-semantic-release-10.4.1/000077500000000000000000000000001506116242600163405ustar00rootroot00000000000000python-semantic-release-10.4.1/.dockerignore000066400000000000000000000001021506116242600210050ustar00rootroot00000000000000tests build dist docs htmlcov .git .mypy_cache .pytest_cache .tox python-semantic-release-10.4.1/.gitattributes000066400000000000000000000001211506116242600212250ustar00rootroot00000000000000# https://help.github.com/articles/dealing-with-line-endings/ * text=auto eol=lf python-semantic-release-10.4.1/.github/000077500000000000000000000000001506116242600177005ustar00rootroot00000000000000python-semantic-release-10.4.1/.github/ISSUE_TEMPLATE/000077500000000000000000000000001506116242600220635ustar00rootroot00000000000000python-semantic-release-10.4.1/.github/ISSUE_TEMPLATE/bug-report.md000066400000000000000000000037761506116242600245100ustar00rootroot00000000000000--- name: Bug Report about: Something isn't working as expected labels: - bug - triage --- ## Bug Report ### Description ### Expected behavior ### Actual behavior ### Environment - **Operating System (w/ version):** - **Python version:** - **Pip version:** - **Semantic-release version:** - **Build tool (w/ version):**
pip freeze ```log ```

git log --oneline --decorate --graph --all -n 50 ```log ```
### Configuration
Semantic Release Configuration ```toml ```

Build System Configuration ```toml ```

GitHub Actions Job Definition ```yaml ```
### Execution Log
semantic-release -vv command ```log ```
### Additional context python-semantic-release-10.4.1/.github/ISSUE_TEMPLATE/documentation.md000066400000000000000000000004461506116242600252620ustar00rootroot00000000000000--- name: Documentation about: I found an error or gap in the docs labels: - docs - triage --- ## Documentation Error ### Description python-semantic-release-10.4.1/.github/ISSUE_TEMPLATE/feature_request.md000066400000000000000000000006761506116242600256210ustar00rootroot00000000000000--- name: Feature Request about: Suggest a new idea labels: - feature - triage --- ## Feature Request ### Description ### Use cases ### Possible implementation ### Alternative solutions python-semantic-release-10.4.1/.github/ISSUE_TEMPLATE/question.md000066400000000000000000000014711506116242600242570ustar00rootroot00000000000000--- name: Question about: I have a question about Python Semantic Release labels: - question - triage --- ## Question ### Configuration
Semantic Release Configuration ```toml ```
## Additional context
git log --oneline --decorate --graph --all -n 50 ```log ```
python-semantic-release-10.4.1/.github/PULL_REQUEST_TEMPLATE.md000066400000000000000000000031411506116242600235000ustar00rootroot00000000000000 ## Purpose ## Rationale ## How did you test? ## How to Verify --- ## PR Completion Checklist - [ ] Reviewed & followed the [Contributor Guidelines](https://python-semantic-release.readthedocs.io/en/latest/contributing.html) - [ ] Changes Implemented & Validation pipeline succeeds - [ ] Commits follow the [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) standard and are separated into the proper commit type and scope (recommended order: test, build, feat/fix, docs) - [ ] Appropriate Unit tests added/updated - [ ] Appropriate End-to-End tests added/updated - [ ] Appropriate Documentation added/updated and syntax validated for sphinx build (see Contributor Guidelines) python-semantic-release-10.4.1/.github/changed-files-spec.yml000066400000000000000000000005441506116242600240470ustar00rootroot00000000000000--- build: - MANIFEST.in - scripts/** docs: - docs/** - README.rst - AUTHORS.rst - CONTRIBUTING.rst - CHANGELOG.rst gha_src: - src/gh_action/** src: - src/semantic_release/** - pyproject.toml gha_tests: - tests/gh_action/** tests: - tests/e2e/** - tests/fixtures/** - tests/unit/** - tests/*.py python-semantic-release-10.4.1/.github/dependabot.yaml000066400000000000000000000017671506116242600227040ustar00rootroot00000000000000--- version: 2 updates: - package-ecosystem: "pip" directory: "/" schedule: interval: "weekly" day: "monday" time: "18:00" commit-message: prefix: "build" include: "scope" labels: - dependencies - dependabot open-pull-requests-limit: 10 rebase-strategy: auto versioning-strategy: "increase-if-necessary" # Maintain dependencies for Docker (ie our GitHub Action) - package-ecosystem: "docker" directory: "src/gh_action" schedule: interval: "monthly" labels: - dependencies - dependabot rebase-strategy: auto commit-message: prefix: "build" include: "scope" # (deps) - package-ecosystem: 
"github-actions" directory: "/" schedule: interval: "weekly" day: "monday" time: "18:00" commit-message: prefix: "ci" labels: - dependencies - dependabot rebase-strategy: auto groups: github-actions: patterns: - "*" python-semantic-release-10.4.1/.github/workflows/000077500000000000000000000000001506116242600217355ustar00rootroot00000000000000python-semantic-release-10.4.1/.github/workflows/ci.yml000066400000000000000000000112771506116242600230630ustar00rootroot00000000000000name: CI on: pull_request: types: [opened, synchronize, reopened, ready_for_review] branches: # Target branches - master # default token permissions = none permissions: {} # If a new push is made to the branch, cancel the previous run concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true jobs: commitlint: # condition: Execute IFF it is protected branch update, or a PR that is NOT in a draft state if: ${{ github.event_name != 'pull_request' || !github.event.pull_request.draft }} runs-on: ubuntu-latest steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: fetch-depth: 0 - uses: wagoid/commitlint-github-action@b948419dd99f3fd78a6548d48f94e3df7f6bf3ed # v6.2.1 eval-changes: name: Evaluate changes # condition: Execute IFF it is protected branch update, or a PR that is NOT in a draft state if: ${{ github.event_name != 'pull_request' || !github.event.pull_request.draft }} runs-on: ubuntu-latest steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: fetch-depth: 100 - name: Evaluate | Check common file types for changes id: core-changed-files uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c #v46.0.5 with: files_yaml_from_source_file: .github/changed-files-spec.yml - name: Evaluate | Check specific file types for changes id: ci-changed-files uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c #v46.0.5 with: files_yaml: | ci: - .github/workflows/ci.yml - 
.github/workflows/validate.yml - name: Evaluate | Detect if any of the combinations of file sets have changed id: all-changes run: | printf '%s\n' "any_changed=false" >> $GITHUB_OUTPUT if [ "${{ steps.core-changed-files.outputs.build_any_changed }}" == "true" ] || \ [ "${{ steps.ci-changed-files.outputs.ci_any_changed }}" == "true" ] || \ [ "${{ steps.core-changed-files.outputs.docs_any_changed }}" == "true" ] || \ [ "${{ steps.core-changed-files.outputs.src_any_changed }}" == "true" ] || \ [ "${{ steps.core-changed-files.outputs.tests_any_changed }}" == "true" ] || \ [ "${{ steps.core-changed-files.outputs.gha_src_any_changed }}" == "true" ] || \ [ "${{ steps.core-changed-files.outputs.gha_tests_any_changed }}" == "true" ]; then printf '%s\n' "any_changed=true" >> $GITHUB_OUTPUT fi outputs: # essentially casts the string output to a boolean for GitHub any-file-changes: ${{ steps.all-changes.outputs.any_changed }} build-changes: ${{ steps.core-changed-files.outputs.build_any_changed }} ci-changes: ${{ steps.ci-changed-files.outputs.ci_any_changed }} doc-changes: ${{ steps.core-changed-files.outputs.docs_any_changed }} src-changes: ${{ steps.core-changed-files.outputs.src_any_changed }} test-changes: ${{ steps.core-changed-files.outputs.tests_any_changed }} gha-src-changes: ${{ steps.core-changed-files.outputs.gha_src_any_changed }} gha-test-changes: ${{ steps.core-changed-files.outputs.gha_tests_any_changed }} validate: needs: eval-changes uses: ./.github/workflows/validate.yml with: # It was a bit of overkill before testing every minor version, and since this project is all about # SemVer, we should expect Python to adhere to that model to. Therefore Only test across 2 OS's but # the lowest supported minor version and the latest stable minor version (just in case). 
python-versions-linux: '["3.8", "3.13"]' # Since the test suite takes ~4 minutes to complete on windows, and windows is billed higher # we are only going to run it on the oldest version of python we support. The older version # will be the most likely area to fail as newer minor versions maintain compatibility. python-versions-windows: '["3.8"]' files-changed: ${{ needs.eval-changes.outputs.any-file-changes }} build-files-changed: ${{ needs.eval-changes.outputs.build-changes }} ci-files-changed: ${{ needs.eval-changes.outputs.ci-changes }} doc-files-changed: ${{ needs.eval-changes.outputs.doc-changes }} src-files-changed: ${{ needs.eval-changes.outputs.src-changes }} test-files-changed: ${{ needs.eval-changes.outputs.test-changes }} gha-src-files-changed: ${{ needs.eval-changes.outputs.gha-src-changes }} gha-test-files-changed: ${{ needs.eval-changes.outputs.gha-test-changes }} permissions: {} secrets: {} python-semantic-release-10.4.1/.github/workflows/cicd.yml000066400000000000000000000221401506116242600233610ustar00rootroot00000000000000--- name: CI/CD on: push: branches: - master - release/** # default token permissions = none permissions: {} jobs: eval-changes: name: Evaluate changes runs-on: ubuntu-latest steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: fetch-depth: 100 # Must at least retrieve a set of commits to compare changes # primarily because of any 'Rebase and Merge' PR action in GitHub - name: Evaluate | Check common file types for changes id: core-changed-files uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c #v46.0.5 with: base_sha: ${{ github.event.push.before }} files_yaml_from_source_file: .github/changed-files-spec.yml - name: Evaluate | Check specific file types for changes id: ci-changed-files uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c #v46.0.5 with: base_sha: ${{ github.event.push.before }} files_yaml: | ci: - .github/workflows/cicd.yml - 
.github/workflows/validate.yml - name: Evaluate | Detect if any of the combinations of file sets have changed id: all-changes run: | printf '%s\n' "any_changed=false" >> $GITHUB_OUTPUT if [ "${{ steps.core-changed-files.outputs.build_any_changed }}" == "true" ] || \ [ "${{ steps.ci-changed-files.outputs.ci_any_changed }}" == "true" ] || \ [ "${{ steps.core-changed-files.outputs.docs_any_changed }}" == "true" ] || \ [ "${{ steps.core-changed-files.outputs.src_any_changed }}" == "true" ] || \ [ "${{ steps.core-changed-files.outputs.tests_any_changed }}" == "true" ] || \ [ "${{ steps.core-changed-files.outputs.gha_src_any_changed }}" == "true" ] || \ [ "${{ steps.core-changed-files.outputs.gha_tests_any_changed }}" == "true" ]; then printf '%s\n' "any_changed=true" >> $GITHUB_OUTPUT fi outputs: any-file-changes: ${{ steps.all-changes.outputs.any_changed }} build-changes: ${{ steps.core-changed-files.outputs.build_any_changed }} ci-changes: ${{ steps.ci-changed-files.outputs.ci_any_changed }} doc-changes: ${{ steps.core-changed-files.outputs.docs_any_changed }} src-changes: ${{ steps.core-changed-files.outputs.src_any_changed }} test-changes: ${{ steps.core-changed-files.outputs.tests_any_changed }} gha-src-changes: ${{ steps.core-changed-files.outputs.gha_src_any_changed }} gha-test-changes: ${{ steps.core-changed-files.outputs.gha_tests_any_changed }} validate: uses: ./.github/workflows/validate.yml needs: eval-changes concurrency: group: ${{ github.workflow }}-validate-${{ github.ref_name }} cancel-in-progress: true with: # It was a bit of overkill before testing every minor version, and since this project is all about # SemVer, we should expect Python to adhere to that model to. Therefore Only test across 2 OS's but # the lowest supported minor version and the latest stable minor version. 
python-versions-linux: '["3.8", "3.13"]' python-versions-windows: '["3.8", "3.13"]' files-changed: ${{ needs.eval-changes.outputs.any-file-changes }} build-files-changed: ${{ needs.eval-changes.outputs.build-changes }} ci-files-changed: ${{ needs.eval-changes.outputs.ci-changes }} doc-files-changed: ${{ needs.eval-changes.outputs.doc-changes }} src-files-changed: ${{ needs.eval-changes.outputs.src-changes }} test-files-changed: ${{ needs.eval-changes.outputs.test-changes }} gha-src-files-changed: ${{ needs.eval-changes.outputs.gha-src-changes }} gha-test-files-changed: ${{ needs.eval-changes.outputs.gha-test-changes }} permissions: {} secrets: {} release: name: Semantic Release runs-on: ubuntu-latest needs: validate if: ${{ needs.validate.outputs.new-release-detected == 'true' }} concurrency: group: ${{ github.workflow }}-release-${{ github.ref_name }} cancel-in-progress: false permissions: contents: write env: GITHUB_ACTIONS_AUTHOR_NAME: github-actions GITHUB_ACTIONS_AUTHOR_EMAIL: actions@users.noreply.github.com steps: # Note: We checkout the repository at the branch that triggered the workflow # with the entire history to ensure to match PSR's release branch detection # and history evaluation. # However, we forcefully reset the branch to the workflow sha because it is # possible that the branch was updated while the workflow was running. This # prevents accidentally releasing un-evaluated changes. 
- name: Setup | Checkout Repository on Release Branch uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: ref: ${{ github.ref_name }} fetch-depth: 0 - name: Setup | Force release branch to be at workflow sha run: | git reset --hard ${{ github.sha }} - name: Setup | Download Build Artifacts uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 id: artifact-download with: name: ${{ needs.validate.outputs.distribution-artifacts }} path: dist - name: Release | Bump Version in Docs if: needs.validate.outputs.new-release-is-prerelease == 'false' env: NEW_VERSION: ${{ needs.validate.outputs.new-release-version }} NEW_RELEASE_TAG: ${{ needs.validate.outputs.new-release-tag }} run: | python -m scripts.bump_version_in_docs git add docs/* - name: Evaluate | Verify upstream has NOT changed # Last chance to abort before causing an error as another PR/push was applied to the upstream branch # while this workflow was running. This is important because we are committing a version change shell: bash run: bash .github/workflows/verify_upstream.sh - name: Release | Python Semantic Release id: release uses: python-semantic-release/python-semantic-release@6df5e876c8682fe0753ec2f8c81eb45547e52747 # v10.4.0 with: github_token: ${{ secrets.GITHUB_TOKEN }} verbosity: 1 build: false - name: Release | Add distribution artifacts to GitHub Release Assets uses: python-semantic-release/publish-action@4681bbe581b99f950d7b6f14599870b6a117fdc1 # v10.4.0 if: steps.release.outputs.released == 'true' with: github_token: ${{ secrets.GITHUB_TOKEN }} tag: ${{ steps.release.outputs.tag }} - name: Release | Update Minor Release Tag Reference if: steps.release.outputs.released == 'true' && steps.release.outputs.is_prerelease == 'false' env: FULL_VERSION_TAG: ${{ steps.release.outputs.tag }} GIT_COMMITTER_NAME: ${{ env.GITHUB_ACTIONS_AUTHOR_NAME }} GIT_COMMITTER_EMAIL: ${{ env.GITHUB_ACTIONS_AUTHOR_EMAIL }} run: | MINOR_VERSION_TAG="$(echo 
"$FULL_VERSION_TAG" | cut -d. -f1,2)" git tag --force --annotate "$MINOR_VERSION_TAG" "${FULL_VERSION_TAG}^{}" -m "$MINOR_VERSION_TAG" git push -u origin "$MINOR_VERSION_TAG" --force - name: Release | Update Major Release Tag Reference if: steps.release.outputs.released == 'true' && steps.release.outputs.is_prerelease == 'false' env: FULL_VERSION_TAG: ${{ steps.release.outputs.tag }} GIT_COMMITTER_NAME: ${{ env.GITHUB_ACTIONS_AUTHOR_NAME }} GIT_COMMITTER_EMAIL: ${{ env.GITHUB_ACTIONS_AUTHOR_EMAIL }} run: | MAJOR_VERSION_TAG="$(echo "$FULL_VERSION_TAG" | cut -d. -f1)" git tag --force --annotate "$MAJOR_VERSION_TAG" "${FULL_VERSION_TAG}^{}" -m "$MAJOR_VERSION_TAG" git push -u origin "$MAJOR_VERSION_TAG" --force outputs: released: ${{ steps.release.outputs.released || 'false' }} new-release-version: ${{ steps.release.outputs.version }} new-release-tag: ${{ steps.release.outputs.tag }} deploy: name: Deploy runs-on: ubuntu-latest if: ${{ needs.release.outputs.released == 'true' && github.repository == 'python-semantic-release/python-semantic-release' }} needs: - validate - release environment: name: pypi url: https://pypi.org/project/python-semantic-release/ permissions: # https://docs.github.com/en/rest/overview/permissions-required-for-github-apps?apiVersion=2022-11-28#metadata id-token: write # needed for PyPI upload steps: - name: Setup | Download Build Artifacts uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 id: artifact-download with: name: ${{ needs.validate.outputs.distribution-artifacts }} path: dist # see https://docs.pypi.org/trusted-publishers/ - name: Publish package distributions to PyPI id: pypi-publish uses: pypa/gh-action-pypi-publish@v1.13.0 with: packages-dir: dist print-hash: true verbose: true python-semantic-release-10.4.1/.github/workflows/manual.yml000066400000000000000000000107051506116242600237400ustar00rootroot00000000000000name: CI (Manual) on: # Enable execution directly from Actions page workflow_dispatch: 
inputs: linux: description: 'Test on Linux?' type: boolean required: true default: true windows: description: 'Test on Windows?' type: boolean required: true default: true python3-13: description: 'Test Python 3.13?' type: boolean required: true default: true python3-12: description: 'Test Python 3.12?' type: boolean required: true default: true python3-11: description: 'Test Python 3.11?' type: boolean required: true default: true python3-10: description: 'Test Python 3.10?' type: boolean required: true default: true python3-9: description: 'Test Python 3.9?' type: boolean required: true default: true python3-8: description: 'Test Python 3.8?' type: boolean required: true default: true # default token permissions = none permissions: {} env: COMMON_PYTHON_VERSION: '3.11' jobs: eval-input: name: Evaluate inputs runs-on: ubuntu-latest steps: - name: Setup | Install Python ${{ env.COMMON_PYTHON_VERSION }} uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: ${{ env.COMMON_PYTHON_VERSION }} - name: Setup | Write file uses: DamianReeves/write-file-action@6929a9a6d1807689191dcc8bbe62b54d70a32b42 #v1.3 with: path: .github/manual_eval_input.py write-mode: overwrite contents: | import json, os version_list = list(filter(None, [ "3.8" if str(os.getenv("INPUT_PY3_8", False)).lower() == str(True).lower() else None, "3.9" if str(os.getenv("INPUT_PY3_9", False)).lower() == str(True).lower() else None, "3.10" if str(os.getenv("INPUT_PY3_10", False)).lower() == str(True).lower() else None, "3.11" if str(os.getenv("INPUT_PY3_11", False)).lower() == str(True).lower() else None, "3.12" if str(os.getenv("INPUT_PY3_12", False)).lower() == str(True).lower() else None, "3.13" if str(os.getenv("INPUT_PY3_13", False)).lower() == str(True).lower() else None, ])) linux_versions = ( version_list if str(os.getenv("INPUT_LINUX", False)).lower() == str(True).lower() else [] ) windows_versions = ( version_list if str(os.getenv("INPUT_WINDOWS", 
False)).lower() == str(True).lower() else [] ) print(f"PYTHON_VERSIONS_LINUX={json.dumps(linux_versions)}") print(f"PYTHON_VERSIONS_WINDOWS={json.dumps(windows_versions)}") - name: Evaluate | Generate Test Matrix id: test-matrix env: INPUT_PY3_8: ${{ inputs.python3-8 }} INPUT_PY3_9: ${{ inputs.python3-9 }} INPUT_PY3_10: ${{ inputs.python3-10 }} INPUT_PY3_11: ${{ inputs.python3-11 }} INPUT_PY3_12: ${{ inputs.python3-12 }} INPUT_PY3_13: ${{ inputs.python3-13 }} INPUT_LINUX: ${{ inputs.linux }} INPUT_WINDOWS: ${{ inputs.windows }} run: | if ! vars="$(python3 .github/manual_eval_input.py)"; then printf '%s\n' "::error::Failed to evaluate input" exit 1 fi printf '%s\n' "$vars" printf '%s\n' "$vars" >> $GITHUB_OUTPUT outputs: python-versions-linux: ${{ steps.test-matrix.outputs.PYTHON_VERSIONS_LINUX }} python-versions-windows: ${{ steps.test-matrix.outputs.PYTHON_VERSIONS_WINDOWS }} validate: needs: eval-input uses: ./.github/workflows/validate.yml with: python-versions-linux: ${{ needs.eval-input.outputs.python-versions-linux }} python-versions-windows: ${{ needs.eval-input.outputs.python-versions-windows }} # There is no way to check for file changes on a manual workflow so # we just assume everything has changed build-files-changed: true ci-files-changed: true doc-files-changed: true src-files-changed: true test-files-changed: true gha-src-files-changed: true gha-test-files-changed: true files-changed: true permissions: {} secrets: {} python-semantic-release-10.4.1/.github/workflows/stale.yml000066400000000000000000000130461506116242600235740ustar00rootroot00000000000000name: 'Stale Bot' on: schedule: # Execute Daily at 7:15 AM UTC - cron: '15 7 * * *' # Default token permissions = None permissions: {} jobs: stale: runs-on: ubuntu-latest permissions: contents: read issues: write pull-requests: write actions: write # required to delete/update cache env: STALE_ISSUE_WARNING_DAYS: 90 STALE_ISSUE_CLOSURE_DAYS: 7 STALE_PR_WARNING_DAYS: 60 STALE_PR_CLOSURE_DAYS: 10 
UNRESPONSIVE_WARNING_DAYS: 14 UNRESPONSIVE_CLOSURE_DAYS: 7 REMINDER_WINDOW: 90 OPERATIONS_RATE_LIMIT: 330 # 1000 api/hr / 3 jobs steps: - name: Stale Issues/PRs uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0 with: # default: 30, GitHub Actions API Rate limit is 1000/hr operations-per-run: ${{ env.OPERATIONS_RATE_LIMIT }} # exempt-all-milestones: false (default) # exempt-all-assignees: false (default) stale-issue-label: stale days-before-issue-stale: ${{ env.STALE_ISSUE_WARNING_DAYS }} days-before-issue-close: ${{ env.STALE_ISSUE_CLOSURE_DAYS }} exempt-issue-labels: confirmed, help-wanted, info stale-issue-message: > This issue is stale because it has not been confirmed or planned by the maintainers and has been open ${{ env.STALE_ISSUE_WARNING_DAYS }} days with no recent activity. It will be closed in ${{ env.STALE_ISSUE_CLOSURE_DAYS }} days, if no further activity occurs. Thank you for your contributions. close-issue-message: > This issue was closed due to lack of activity. # PR Configurations stale-pr-label: stale days-before-pr-stale: ${{ env.STALE_PR_WARNING_DAYS }} days-before-pr-close: ${{ env.STALE_PR_CLOSURE_DAYS }} exempt-pr-labels: confirmed, dependabot stale-pr-message: > This PR is stale because it has not been confirmed or considered ready for merge by the maintainers but has been open ${{ env.STALE_PR_WARNING_DAYS }} days with no recent activity. It will be closed in ${{ env.STALE_PR_CLOSURE_DAYS }} days, if no further activity occurs. Please make sure to add the proper testing, docs, and descriptions of changes before your PR can be merged. Thank you for your contributions. close-pr-message: > This PR was closed due to lack of activity. - name: Unresponsive Issues/PRs # Closes issues rapidly when submitter is unresponsive. The timer is initiated # by maintainer by placing the awaiting-reply label on the issue or PR. From # that point the submitter has 14 days before a reminder/warning is given. 
If # no response has been received within 3 weeks, the issue is closed. There are # no exemptions besides removing the awaiting-reply label. uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0 with: # GitHub Actions API Rate limit is 1000/hr operations-per-run: ${{ env.OPERATIONS_RATE_LIMIT }} only-labels: awaiting-reply stale-issue-label: unresponsive stale-pr-label: unresponsive remove-stale-when-updated: awaiting-reply days-before-stale: ${{ env.UNRESPONSIVE_WARNING_DAYS }} days-before-close: ${{ env.UNRESPONSIVE_CLOSURE_DAYS }} stale-issue-message: > This issue has not received a response in ${{ env.UNRESPONSIVE_WARNING_DAYS }} days. If no response is received in ${{ env.UNRESPONSIVE_CLOSURE_DAYS }} days, it will be closed. We look forward to hearing from you. close-issue-message: > This issue was closed because no response was received. stale-pr-message: > This PR has not received a response in ${{ env.UNRESPONSIVE_WARNING_DAYS }} days. If no response is received in ${{ env.UNRESPONSIVE_CLOSURE_DAYS }} days, it will be closed. We look forward to hearing from you. close-pr-message: > This PR was closed because no response was received. - name: Reminders on Confirmed Issues/PRs # Posts a reminder when confirmed issues are not updated in a timely manner. # The timer is initiated by a maintainer by placing the confirmed label on # the issue or PR (which prevents stale closure), however, to prevent it being # forgotten completely, this job will post a reminder message to the maintainers # No closures will occur and there are no exemptions besides removing the confirmed # label. 
uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0 with: # GitHub Actions API Rate limit is 1000/hr operations-per-run: ${{ env.OPERATIONS_RATE_LIMIT }} only-labels: confirmed stale-issue-label: needs-update stale-pr-label: needs-update days-before-stale: ${{ env.REMINDER_WINDOW }} days-before-close: -1 # never close stale-issue-message: > It has been ${{ env.REMINDER_WINDOW }} days since the last update on this confirmed issue. @python-semantic-release/team can you provide an update on the status of this issue? stale-pr-message: > It has been ${{ env.REMINDER_WINDOW }} days since the last update on this confirmed PR. @python-semantic-release/team can you provide an update on the status of this PR? python-semantic-release-10.4.1/.github/workflows/validate.yml000066400000000000000000000436461506116242600242660ustar00rootroot00000000000000--- name: Validation Pipeline on: # Enable workflow as callable from another workflow workflow_call: inputs: python-versions-linux: description: 'Python versions to test on Linux (JSON array)' required: true type: string python-versions-windows: description: 'Python versions to test on Windows (JSON array)' required: true type: string files-changed: description: 'Boolean string result for if any files have changed' type: string required: false default: 'false' build-files-changed: description: 'Boolean string result for if build files have changed' type: string required: false default: 'false' ci-files-changed: description: 'Boolean string result for if CI files have changed' type: string required: false default: 'false' doc-files-changed: description: 'Boolean string result for if documentation files have changed' type: string required: false default: 'false' src-files-changed: description: 'Boolean string result for if source files have changed' type: string required: false default: 'false' test-files-changed: description: 'Boolean string result for if test files have changed' type: string required: false 
default: 'false' gha-src-files-changed: description: 'Boolean string result for if GitHub Action source files have changed' type: string required: false default: 'false' gha-test-files-changed: description: 'Boolean string result for if GitHub Action test files have changed' type: string required: false default: 'false' outputs: new-release-detected: description: Boolean string result for if new release is available value: ${{ jobs.build.outputs.new-release-detected }} new-release-version: description: Version string for the new release value: ${{ jobs.build.outputs.new-release-version }} new-release-tag: description: Tag string for the new release value: ${{ jobs.build.outputs.new-release-tag }} new-release-is-prerelease: description: Boolean string result for if new release is a pre-release value: ${{ jobs.build.outputs.new-release-is-prerelease }} distribution-artifacts: description: Artifact Download name for the distribution artifacts value: ${{ jobs.build.outputs.distribution-artifacts }} # secrets: none required ATT # set default Token permissions = none permissions: {} env: LOWEST_PYTHON_VERSION: '3.8' COMMON_PYTHON_VERSION: '3.11' jobs: build: name: Build runs-on: ubuntu-latest if: ${{ inputs.build-files-changed == 'true' || inputs.src-files-changed == 'true' || inputs.test-files-changed == 'true' || inputs.ci-files-changed == 'true' }} steps: - name: Setup | Checkout Repository at workflow sha uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: ref: ${{ github.sha }} fetch-depth: 0 - name: Setup | Force correct release branch on workflow sha run: | git checkout -B ${{ github.ref_name }} - name: Setup | Install Python ${{ env.COMMON_PYTHON_VERSION }} uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: ${{ env.COMMON_PYTHON_VERSION }} cache: 'pip' - name: Setup | Install dependencies run: | python -m pip install --upgrade pip setuptools wheel pip install -e .[build] - name: Build | 
Build next version artifacts id: version uses: python-semantic-release/python-semantic-release@6df5e876c8682fe0753ec2f8c81eb45547e52747 # v10.4.0 with: github_token: "" verbosity: 1 build: true changelog: true commit: false push: false tag: false vcs_release: false - name: Build | Annotate next version if: steps.version.outputs.released == 'true' run: | printf '%s\n' "::notice::Next release will be '${{ steps.version.outputs.tag }}'" - name: Build | Create non-versioned distribution artifact if: steps.version.outputs.released == 'false' run: python -m build . - name: Build | Set distribution artifact variables id: build run: | printf '%s\n' "dist_dir=dist/*" >> $GITHUB_OUTPUT printf '%s\n' "artifacts_name=dist" >> $GITHUB_OUTPUT - name: Upload | Distribution Artifacts uses: actions/upload-artifact@v4 with: name: ${{ steps.build.outputs.artifacts_name }} path: ${{ steps.build.outputs.dist_dir }} if-no-files-found: error retention-days: 2 outputs: new-release-detected: ${{ steps.version.outputs.released }} new-release-version: ${{ steps.version.outputs.version }} new-release-tag: ${{ steps.version.outputs.tag }} new-release-is-prerelease: ${{ steps.version.outputs.is_prerelease }} distribution-artifacts: ${{ steps.build.outputs.artifacts_name }} unit-test: name: Unit Tests if: ${{ inputs.src-files-changed == 'true' || inputs.test-files-changed == 'true' || inputs.ci-files-changed == 'true' }} runs-on: ubuntu-latest steps: - name: Setup | Checkout Repository uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: ref: ${{ github.sha }} fetch-depth: 1 - name: Setup | Install Python ${{ env.LOWEST_PYTHON_VERSION }} uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: ${{ env.LOWEST_PYTHON_VERSION }} cache: 'pip' - name: Setup | Install dependencies run: | python -m pip install --upgrade pip setuptools wheel pip install -e .[test] pip install pytest-github-actions-annotate-failures - name: Test | Run 
pytest -m unit --comprehensive id: tests env: COLUMNS: 150 run: | pytest \ -vv \ -nauto \ -m unit \ --comprehensive \ --cov=semantic_release \ --cov-context=test \ --cov-report=term-missing \ --cov-fail-under=60 \ --junit-xml=tests/reports/pytest-results.xml - name: Report | Upload Test Results uses: mikepenz/action-junit-report@3585e9575db828022551b4231f165eb59a0e74e3 # v5.6.2 if: ${{ always() && steps.tests.outcome != 'skipped' }} with: report_paths: ./tests/reports/*.xml annotate_only: true test-linux: name: Python ${{ matrix.python-version }} on ${{ matrix.os }} E2E tests runs-on: ${{ matrix.os }} needs: - build - unit-test if: ${{ inputs.src-files-changed == 'true' || inputs.test-files-changed == 'true' || inputs.ci-files-changed == 'true' }} strategy: matrix: python-version: ${{ fromJson(inputs.python-versions-linux) }} os: - ubuntu-latest steps: - name: Setup | Checkout Repository uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: ref: ${{ github.sha }} fetch-depth: 1 - name: Setup | Install Python ${{ matrix.python-version }} uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: ${{ matrix.python-version }} cache: 'pip' - name: Setup | Download Distribution Artifacts uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 with: name: ${{ needs.build.outputs.distribution-artifacts }} path: ./dist - name: Setup | Install dependencies id: install # To ensure we are testing our installed package (not the src code), we must # uninstall the editable install (symlink) first then install the distribution artifact. 
# Lastly, we ask python to give us the installation location of our distribution artifact # so that we can use it in the pytest command for coverage run: | python -m pip install --upgrade pip setuptools wheel pip install -e .[test] pip install pytest-github-actions-annotate-failures pip uninstall -y python-semantic-release pip install dist/python_semantic_release-*.whl python -c 'import pathlib, semantic_release; print(f"PKG_INSTALLED_DIR={pathlib.Path(semantic_release.__file__).resolve().parent}")' >> $GITHUB_OUTPUT - name: Test | Run pytest -m e2e --comprehensive id: tests env: COLUMNS: 150 run: | pytest \ -vv \ -nauto \ -m e2e \ --comprehensive \ --cov=${{ steps.install.outputs.PKG_INSTALLED_DIR }} \ --cov-context=test \ --cov-report=term-missing \ --cov-fail-under=70 \ --junit-xml=tests/reports/pytest-results.xml - name: Report | Upload Cached Repos on Failure uses: actions/upload-artifact@v4 if: ${{ failure() && steps.tests.outcome == 'failure' }} with: name: ${{ format('cached-repos-{0}-{1}', matrix.os, matrix.python-version) }} path: .pytest_cache/d/psr-* include-hidden-files: true if-no-files-found: error retention-days: 1 - name: Report | Upload Tested Repos on Failure uses: actions/upload-artifact@v4 if: ${{ failure() && steps.tests.outcome == 'failure' }} with: name: ${{ format('tested-repos-{0}-{1}', matrix.os, matrix.python-version) }} path: /tmp/pytest-of-runner/pytest-current/* include-hidden-files: true if-no-files-found: error retention-days: 1 - name: Report | Upload Test Results uses: mikepenz/action-junit-report@3585e9575db828022551b4231f165eb59a0e74e3 # v5.6.2 if: ${{ always() && steps.tests.outcome != 'skipped' }} with: report_paths: ./tests/reports/*.xml annotate_only: true test-windows: name: Python ${{ matrix.python-version }} on ${{ matrix.os }} E2E tests runs-on: ${{ matrix.os }} needs: - build - unit-test if: ${{ inputs.src-files-changed == 'true' || inputs.test-files-changed == 'true' || inputs.ci-files-changed == 'true' }} strategy: 
matrix: python-version: ${{ fromJson(inputs.python-versions-windows) }} os: [windows-latest] steps: - name: Setup | Checkout Repository uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: ref: ${{ github.sha }} fetch-depth: 1 - name: Setup | Install Python ${{ matrix.python-version }} uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: ${{ matrix.python-version }} cache: 'pip' - name: Setup | Download Distribution Artifacts uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 with: name: ${{ needs.build.outputs.distribution-artifacts }} path: dist - name: Setup | Install dependencies id: install # To ensure we are testing our installed package (not the src code), we must # uninstall the editable install (symlink) first then install the distribution artifact. # Lastly, we ask python to give us the installation location of our distribution artifact # so that we can use it in the pytest command for coverage shell: pwsh run: | $ErrorActionPreference = 'stop' python -m pip install --upgrade pip setuptools wheel pip install -e .[test] pip install pytest-github-actions-annotate-failures pip uninstall -y python-semantic-release $psrWheelFile = Get-ChildItem dist\python_semantic_release-*.whl -File | Select-Object -Index 0 pip install "$psrWheelFile" python -c 'import pathlib, semantic_release; print(f"PKG_INSTALLED_DIR={pathlib.Path(semantic_release.__file__).resolve().parent}")' | Tee-Object -Variable cmdOutput echo $cmdOutput >> $env:GITHUB_OUTPUT - name: Test | Run pytest -m e2e id: tests shell: pwsh # env: # Required for GitPython to work on Windows because of getpass.getuser() # USERNAME: "runneradmin" # COLUMNS: 150 # Because GHA is currently broken on Windows to pass these varables, we do it manually run: | $env:USERNAME = "runneradmin" $env:COLUMNS = 150 pytest ` -vv ` -nauto ` -m e2e ` `--cov=${{ steps.install.outputs.PKG_INSTALLED_DIR }} ` `--cov-context=test ` 
`--cov-report=term-missing ` `--junit-xml=tests/reports/pytest-results.xml - name: Report | Upload Cached Repos on Failure uses: actions/upload-artifact@v4 if: ${{ failure() && steps.tests.outcome == 'failure' }} with: name: ${{ format('cached-repos-{0}-{1}', matrix.os, matrix.python-version) }} path: .pytest_cache/d/psr-* include-hidden-files: true if-no-files-found: error retention-days: 1 - name: Report | Upload Tested Repos on Failure uses: actions/upload-artifact@v4 if: ${{ failure() && steps.tests.outcome == 'failure' }} with: name: ${{ format('tested-repos-{0}-{1}', matrix.os, matrix.python-version) }} path: ~/AppData/Local/Temp/pytest-of-runneradmin/pytest-current/* include-hidden-files: true if-no-files-found: error retention-days: 1 - name: Report | Upload Test Results uses: mikepenz/action-junit-report@3585e9575db828022551b4231f165eb59a0e74e3 # v5.6.2 if: ${{ always() && steps.tests.outcome != 'skipped' }} with: report_paths: ./tests/reports/*.xml annotate_only: true test-gh-action: name: Validate Action Build & Execution runs-on: ubuntu-latest if: ${{ inputs.gha-src-files-changed == 'true' || inputs.gha-test-files-changed == 'true' || inputs.ci-files-changed == 'true' }} needs: - build - unit-test env: TEST_CONTAINER_TAG: psr-action:latest ACTION_SRC_DIR: src/gh_action steps: - name: Setup | Checkout Repository uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: fetch-depth: 1 ref: ${{ github.sha }} - name: Setup | Download Distribution Artifacts uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 with: name: ${{ needs.build.outputs.distribution-artifacts }} path: ${{ env.ACTION_SRC_DIR }} - name: Setup | Update Dependency list with latest version working-directory: ${{ env.ACTION_SRC_DIR }} run: | find . 
-name '*.whl' > requirements.txt - name: Setup | Allow Docker build to include wheel files working-directory: ${{ env.ACTION_SRC_DIR }} run: | printf '%s\n' "!*.whl" >> .dockerignore - name: Build | Action Container id: container-builder uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0 with: context: ${{ env.ACTION_SRC_DIR }} load: true # add to `docker images` push: false platforms: linux/amd64 tags: ${{ env.TEST_CONTAINER_TAG }} - name: Test | Action Container run: bash tests/gh_action/run.sh lint: name: Lint if: ${{ inputs.files-changed == 'true' }} runs-on: ubuntu-latest steps: - name: Setup | Checkout Repository uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: ref: ${{ github.sha }} fetch-depth: 1 - name: Setup | Install Python ${{ env.COMMON_PYTHON_VERSION }} uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: ${{ env.COMMON_PYTHON_VERSION }} cache: 'pip' - name: Setup | Install dependencies run: | python -m pip install --upgrade pip setuptools wheel pip install -e .[dev,mypy,test] # needs test because we run mypy over the tests as well and without the dependencies # mypy will throw import errors - name: Lint | Ruff Evaluation id: lint run: | ruff check \ --config pyproject.toml \ --output-format=full \ --exit-non-zero-on-fix - name: Type-Check | MyPy Evaluation id: type-check if: ${{ always() && steps.lint.outcome != 'skipped' }} run: | mypy . 
- name: Format-Check | Ruff Evaluation id: format-check if: ${{ always() && steps.type-check.outcome != 'skipped' }} run: | ruff format --check --config pyproject.toml python-semantic-release-10.4.1/.github/workflows/verify_upstream.sh000066400000000000000000000017431506116242600255220ustar00rootroot00000000000000#!/bin/bash set -eu +o pipefail # Example output of `git status -sb`: # ## master...origin/master [behind 1] # M .github/workflows/verify_upstream.sh UPSTREAM_BRANCH_NAME="$(git status -sb | head -n 1 | awk -F '\\.\\.\\.' '{print $2}' | cut -d ' ' -f1)" printf '%s\n' "Upstream branch name: $UPSTREAM_BRANCH_NAME" set -o pipefail if [ -z "$UPSTREAM_BRANCH_NAME" ]; then printf >&2 '%s\n' "::error::Unable to determine upstream branch name!" exit 1 fi git fetch "${UPSTREAM_BRANCH_NAME%%/*}" if ! UPSTREAM_SHA="$(git rev-parse "$UPSTREAM_BRANCH_NAME")"; then printf >&2 '%s\n' "::error::Unable to determine upstream branch sha!" exit 1 fi HEAD_SHA="$(git rev-parse HEAD)" if [ "$HEAD_SHA" != "$UPSTREAM_SHA" ]; then printf >&2 '%s\n' "[HEAD SHA] $HEAD_SHA != $UPSTREAM_SHA [UPSTREAM SHA]" printf >&2 '%s\n' "::error::Upstream has changed, aborting release..." exit 1 fi printf '%s\n' "Verified upstream branch has not changed, continuing with release..." python-semantic-release-10.4.1/.gitignore000066400000000000000000000014671506116242600203400ustar00rootroot00000000000000# Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] # C extensions *.so # Distribution / packaging .Python .venv env/ build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ *.egg-info/ .installed.cfg *.egg venv/ # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. 
*.manifest *.spec # PyCharm .idea *.iml # VSCode .vscode/ # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *,cover # Translations *.mo *.pot # Django stuff: *.log # Sphinx documentation docs/_build/ docs/api/modules/ # PyBuilder target/ .pytest_cache .mypy_cache .python-version *.swp python-semantic-release-10.4.1/.pre-commit-config.yaml000066400000000000000000000050661506116242600226300ustar00rootroot00000000000000--- default_language_version: python: python3 exclude: "^CHANGELOG.md$" repos: # Meta hooks - repo: meta hooks: - id: check-hooks-apply - id: check-useless-excludes # Security & credential scanning/alerting - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.6.0 hooks: - id: debug-statements - id: detect-aws-credentials args: ["--allow-missing-credentials"] - id: detect-private-key - id: check-builtin-literals - id: check-yaml - id: check-toml - id: check-case-conflict - id: end-of-file-fixer - id: trailing-whitespace - id: check-merge-conflict - id: mixed-line-ending - id: check-ast - repo: https://github.com/asottile/pyupgrade rev: v3.16.0 hooks: - id: pyupgrade args: ["--py38-plus", "--keep-runtime-typing"] # Linters and validation - repo: https://github.com/astral-sh/ruff-pre-commit rev: v0.6.1 hooks: - id: ruff name: ruff (lint) args: - "--fix" - "--exit-non-zero-on-fix" - "--statistics" - "--output-format=full" - id: ruff-format name: ruff (format) - repo: https://github.com/pre-commit/mirrors-mypy rev: "v1.16.1" hooks: - id: mypy additional_dependencies: - "pydantic>=2,<3" - "types-requests" log_file: "mypy.log" files: "^(src|tests)/.*" pass_filenames: false - repo: https://github.com/pre-commit/pygrep-hooks rev: v1.10.0 hooks: # - id: python-use-type-annotations - id: python-check-blanket-noqa - id: python-check-mock-methods - id: python-no-eval - id: python-no-log-warn - id: rst-backticks - id: rst-directive-colons - id: 
rst-inline-touching-normal - repo: https://github.com/jendrikseipp/vulture rev: "v2.11" hooks: - id: vulture args: - --min-confidence - "100" - --sort-by-size - "semantic_release" - "tests" - repo: https://github.com/pycqa/bandit rev: 1.7.8 hooks: - id: bandit args: - "-c" - "pyproject.toml" - "--quiet" - "src/" # Needed if using pyproject.toml for config additional_dependencies: ["bandit[toml]"] pass_filenames: false # GHA linting - repo: https://github.com/python-jsonschema/check-jsonschema rev: "0.28.0" hooks: - id: check-github-workflows - id: check-readthedocs - id: check-dependabot python-semantic-release-10.4.1/.readthedocs.yml000066400000000000000000000003561506116242600214320ustar00rootroot00000000000000--- version: 2 sphinx: configuration: docs/conf.py builder: html formats: - htmlzip python: install: - method: pip path: . extra_requirements: - docs build: os: "ubuntu-22.04" tools: python: "3" python-semantic-release-10.4.1/CHANGELOG.rst000066400000000000000000007060341506116242600203730ustar00rootroot00000000000000.. _changelog: ========= CHANGELOG ========= .. _changelog-v10.4.1: v10.4.1 (2025-09-13) ==================== 🪲 Bug Fixes ------------ * **cmd-version**: Fix error where ``--no-tag`` is not respected, closes `#1304`_ (`PR#1329`_, `b090fa2`_) 📖 Documentation ---------------- * **CHANGELOG**: Update hyperlink in v10.4.0's additional info paragraph (`PR#1323`_, `98ef722`_) * **getting-started-guide**: Remove notice about lack of monorepo support, closes `#1326`_ (`PR#1327`_, `3f21f3f`_) * **github-actions**: Fix recommended upstream detection script's upstream name parsing (`PR#1328`_, `ccc91c0`_) .. _#1304: https://github.com/python-semantic-release/python-semantic-release/issues/1304 .. _#1326: https://github.com/python-semantic-release/python-semantic-release/issues/1326 .. _3f21f3f: https://github.com/python-semantic-release/python-semantic-release/commit/3f21f3fc47a0dacc11ec95feb2a23f8cf132e77b .. 
_98ef722: https://github.com/python-semantic-release/python-semantic-release/commit/98ef722b65bd6a37492cf7ec8b0425800f719114 .. _b090fa2: https://github.com/python-semantic-release/python-semantic-release/commit/b090fa2efc0ebfb40bdc572fea307d356af95a3f .. _ccc91c0: https://github.com/python-semantic-release/python-semantic-release/commit/ccc91c09fab45358c7e52b42e6c0607c68c9d8f3 .. _PR#1323: https://github.com/python-semantic-release/python-semantic-release/pull/1323 .. _PR#1327: https://github.com/python-semantic-release/python-semantic-release/pull/1327 .. _PR#1328: https://github.com/python-semantic-release/python-semantic-release/pull/1328 .. _PR#1329: https://github.com/python-semantic-release/python-semantic-release/pull/1329 .. _changelog-v10.4.0: v10.4.0 (2025-09-08) ==================== ✨ Features ----------- * **config**: Add ``conventional-monorepo`` as valid ``commit_parser`` type (`PR#1143`_, `e18f866`_) * **parser**: Add new conventional-commits standard parser for monorepos, closes `#614`_ (`PR#1143`_, `e18f866`_) 📖 Documentation ---------------- * Add configuration guide for monorepo use with PSR (`PR#1143`_, `e18f866`_) * **commit-parsers**: Introduce conventional commit monorepo parser options & features (`PR#1143`_, `e18f866`_) * **configuration**: Update ``commit_parser`` option with new ``conventional-monorepo`` value (`PR#1143`_, `e18f866`_) 💡 Additional Release Information --------------------------------- * **config**: This release introduces a new built-in parser type that can be utilized for monorepo projects. The type value is ``conventional-monorepo`` and when specified it will apply the conventional commit parser to a monorepo environment. This parser has specialized options to help handle monorepo projects as well. For more information, please refer to the `Monorepo Docs`_. .. _#614: https://github.com/python-semantic-release/python-semantic-release/issues/614 .. 
_e18f866: https://github.com/python-semantic-release/python-semantic-release/commit/e18f86640a78b374a327848b9e2ba868003d1a43 .. _Monorepo Docs: /configuration/configuration-guides/monorepos.html .. _PR#1143: https://github.com/python-semantic-release/python-semantic-release/pull/1143 .. _changelog-v10.3.2: v10.3.2 (2025-09-06) ==================== 🪲 Bug Fixes ------------ * **cmd-version**: Prevent errors when PSR is executed in non-GitHub CI environments, closes `#1315`_ (`PR#1322`_, `4df4be4`_) âš¡ Performance Improvements --------------------------- * **cmd-version**: Re-order operations for faster parsing in version determination (`PR#1310`_, `63e435b`_) 📖 Documentation ---------------- * **uv-integration**: Add ``--no-changelog`` to build step to increase job speed (`PR#1316`_, `e1aece1`_) 💡 Additional Release Information --------------------------------- * **cmd-version**: Unfortunately, PSR introduced a bug in 10.3.0 when attempting to provide more CI outputs for GitHub Actions. It required our GitHub client interface to be loaded and even if it was not using GitHub CI to be run. This caused errors in Gitea and likely GitLab/Bitbucket environments. This change prevents that from happening but if any users pipelines were intentionally presenting the environment variable "GITHUB_OUTPUT" to enable action output to enable passing along internal outputs of PSR then their hack will no longer work after this change. .. _#1315: https://github.com/python-semantic-release/python-semantic-release/issues/1315 .. _4df4be4: https://github.com/python-semantic-release/python-semantic-release/commit/4df4be465710e3b31ba65487069eccef1eeb8be1 .. _63e435b: https://github.com/python-semantic-release/python-semantic-release/commit/63e435ba466e1e980b9680d0f759950e5e598a61 .. _e1aece1: https://github.com/python-semantic-release/python-semantic-release/commit/e1aece18ae1998b1523be65b1e569837a7054251 .. 
_PR#1310: https://github.com/python-semantic-release/python-semantic-release/pull/1310 .. _PR#1316: https://github.com/python-semantic-release/python-semantic-release/pull/1316 .. _PR#1322: https://github.com/python-semantic-release/python-semantic-release/pull/1322 .. _changelog-v10.3.1: v10.3.1 (2025-08-06) ==================== 🪲 Bug Fixes ------------ * **github-actions**: Refactor the action output error checking for non-release executions, closes `#1307`_ (`PR#1308`_, `5385724`_) 📖 Documentation ---------------- * **github-actions**: Adjust docs for direct links to action example workflows, closes `#1303`_ (`PR#1309`_, `8efebe2`_) .. _#1303: https://github.com/python-semantic-release/python-semantic-release/issues/1303 .. _#1307: https://github.com/python-semantic-release/python-semantic-release/issues/1307 .. _5385724: https://github.com/python-semantic-release/python-semantic-release/commit/538572426cb30dd4d8c99cea660e290b56361f75 .. _8efebe2: https://github.com/python-semantic-release/python-semantic-release/commit/8efebe281be2deab1b203cd01d9aedf1542c4ad4 .. _PR#1308: https://github.com/python-semantic-release/python-semantic-release/pull/1308 .. _PR#1309: https://github.com/python-semantic-release/python-semantic-release/pull/1309 .. 
_changelog-v10.3.0: v10.3.0 (2025-08-04) ==================== ✨ Features ----------- * **github-actions**: Add ``commit_sha`` as a GitHub Actions output value, closes `#717`_ (`PR#1289`_, `39b647b`_) * **github-actions**: Add ``previous_version`` as a GitHub Actions output value (`PR#1302`_, `c0197b7`_) * **github-actions**: Add ``release_notes`` as a GitHub Actions output value (`PR#1300`_, `a3fd23c`_) * **github-actions**: Add release ``link`` as a GitHub Actions output value (`PR#1301`_, `888aea1`_) 🪲 Bug Fixes ------------ * **github-actions**: Fix variable output newlines (`PR#1300`_, `a3fd23c`_) * **util**: Fixes no-op log output when commit message contains square-brackets, closes `#1251`_ (`PR#1287`_, `f25883f`_) 📖 Documentation ---------------- * **getting-started**: Fixes ``changelog.exclude_commit_patterns`` example in startup guide, closes `#1291`_ (`PR#1292`_, `2ce2e94`_) * **github-actions**: Add description of ``commit_sha`` GitHub Action output in docs (`PR#1289`_, `39b647b`_) * **github-actions**: Add description of ``previous_release`` GitHub Action output (`PR#1302`_, `c0197b7`_) * **github-actions**: Add description of ``release_notes`` GitHub Action output (`PR#1300`_, `a3fd23c`_) * **github-actions**: Add description of release ``link`` GitHub Action output (`PR#1301`_, `888aea1`_) * **README**: Update broken links to match re-located destinations (`PR#1285`_, `f4ec792`_) .. _#1251: https://github.com/python-semantic-release/python-semantic-release/issues/1251 .. _#1291: https://github.com/python-semantic-release/python-semantic-release/issues/1291 .. _#717: https://github.com/python-semantic-release/python-semantic-release/issues/717 .. _2ce2e94: https://github.com/python-semantic-release/python-semantic-release/commit/2ce2e94e1930987a88c0a5e3d59baa7cb717f557 .. _39b647b: https://github.com/python-semantic-release/python-semantic-release/commit/39b647ba62e242342ef5a0d07cb0cfdfa7769865 .. 
_888aea1: https://github.com/python-semantic-release/python-semantic-release/commit/888aea1e450513ac7339c72d8b50fabdb4ac177b .. _a3fd23c: https://github.com/python-semantic-release/python-semantic-release/commit/a3fd23cb0e49f74cb4a345048609d3643a665782 .. _c0197b7: https://github.com/python-semantic-release/python-semantic-release/commit/c0197b711cfa83f5b13f9ae4f37e555b26f544d9 .. _f25883f: https://github.com/python-semantic-release/python-semantic-release/commit/f25883f8403365b787e7c3e86d2d982906804621 .. _f4ec792: https://github.com/python-semantic-release/python-semantic-release/commit/f4ec792d73acb34b8f5183ec044a301b593f16f0 .. _PR#1285: https://github.com/python-semantic-release/python-semantic-release/pull/1285 .. _PR#1287: https://github.com/python-semantic-release/python-semantic-release/pull/1287 .. _PR#1289: https://github.com/python-semantic-release/python-semantic-release/pull/1289 .. _PR#1292: https://github.com/python-semantic-release/python-semantic-release/pull/1292 .. _PR#1300: https://github.com/python-semantic-release/python-semantic-release/pull/1300 .. _PR#1301: https://github.com/python-semantic-release/python-semantic-release/pull/1301 .. _PR#1302: https://github.com/python-semantic-release/python-semantic-release/pull/1302 .. _changelog-v10.2.0: v10.2.0 (2025-06-29) ==================== ✨ Features ----------- * **cmd-version**: Adds ``PACKAGE_NAME`` value into build command environment (`db9bc13`_) 📖 Documentation ---------------- * **configuration**: Update build command environment definition to include ``PACKAGE_NAME`` variable (`4aa3805`_) * **uv-integration**: Fix configuration guide for ``uv`` usage to ensure lock file update (`5390145`_) .. _4aa3805: https://github.com/python-semantic-release/python-semantic-release/commit/4aa38059ce6b33ca23a547473e9fb8a19d3ffbe1 .. _5390145: https://github.com/python-semantic-release/python-semantic-release/commit/5390145503b4d5dcca8f323e1ba6c5bec0bd079b .. 
_db9bc13: https://github.com/python-semantic-release/python-semantic-release/commit/db9bc132c8a0398f2cce647730c69a32ca35ba51 .. _changelog-v10.1.0: v10.1.0 (2025-06-12) ==================== ✨ Features ----------- * **cmd-version**: Always stage version stamped files & changelog even with ``--no-commit``, closes `#1211`_ (`PR#1214`_, `de62334`_) 📖 Documentation ---------------- * **cmd-version**: Improve command description & include common uses (`PR#1214`_, `de62334`_) * **configuration-guide**: Add how-to guide for ``uv`` integration (`PR#1214`_, `de62334`_) * **github-actions**: Clarify with examples of the ``root_options`` v10 migration change (`PR#1271`_, `fbb63ec`_) âš™ï¸ Build System ---------------- * **deps**: Expand ``python-gitlab`` dependency to include ``v6.0.0`` (`PR#1273`_, `99fc9cc`_) .. _#1211: https://github.com/python-semantic-release/python-semantic-release/issues/1211 .. _99fc9cc: https://github.com/python-semantic-release/python-semantic-release/commit/99fc9ccabbae9adf5646731591080366eacbe03c .. _de62334: https://github.com/python-semantic-release/python-semantic-release/commit/de623344cd18b3dbe05823eb90fdd010c5505c92 .. _fbb63ec: https://github.com/python-semantic-release/python-semantic-release/commit/fbb63ec76142ea903d8a0401369ec251abbec0fe .. _PR#1214: https://github.com/python-semantic-release/python-semantic-release/pull/1214 .. _PR#1271: https://github.com/python-semantic-release/python-semantic-release/pull/1271 .. _PR#1273: https://github.com/python-semantic-release/python-semantic-release/pull/1273 .. _changelog-v10.0.2: v10.0.2 (2025-05-26) ==================== 🪲 Bug Fixes ------------ * **github-actions**: Add filesystem UID/GID fixer after action workspace modification (`PR#1262`_, `93e23c8`_) .. _93e23c8: https://github.com/python-semantic-release/python-semantic-release/commit/93e23c8993fe6f113095bfcd5089684f403cc6b9 .. _PR#1262: https://github.com/python-semantic-release/python-semantic-release/pull/1262 .. 
_changelog-v10.0.1: v10.0.1 (2025-05-25) ==================== 🪲 Bug Fixes ------------ * **github-actions**: Bump the github-actions dependency to ``v10.0.0`` (`PR#1255`_, `2803676`_) .. _2803676: https://github.com/python-semantic-release/python-semantic-release/commit/2803676cf26c52177fa98d9144934853744a22bb .. _PR#1255: https://github.com/python-semantic-release/python-semantic-release/pull/1255 .. _changelog-v10.0.0: v10.0.0 (2025-05-25) ==================== ✨ Features ----------- * **cmd-version**: Enable ``version_variables`` version stamp of vars with double-equals (`PR#1244`_, `080e4bc`_) * **parser-conventional**: Set parser to evaluate all squashed commits by default (`6fcdc99`_) * **parser-conventional**: Set parser to ignore merge commits by default (`59bf084`_) * **parser-emoji**: Set parser to evaluate all squashed commits by default (`514a922`_) * **parser-emoji**: Set parser to ignore merge commits by default (`8a51525`_) * **parser-scipy**: Set parser to evaluate all squashed commits by default (`634fffe`_) * **parser-scipy**: Set parser to ignore merge commits by default (`d4f128e`_) 🪲 Bug Fixes ------------ * **changelog-md**: Change to 1-line descriptions in markdown template, closes `#733`_ (`e7ac155`_) * **changelog-rst**: Change to 1-line descriptions in the default ReStructuredText template, closes `#733`_ (`731466f`_) * **cli**: Adjust verbosity parameter to enable silly-level logging (`bd3e7bf`_) * **github-action**: Resolve command injection vulnerability in action script (`fb3da27`_) * **parser-conventional**: Remove breaking change footer messages from commit descriptions (`b271cbb`_) * **parser-conventional**: Remove issue footer messages from commit descriptions (`b1bb0e5`_) * **parser-conventional**: Remove PR/MR references from commit subject line (`eed63fa`_) * **parser-conventional**: Remove release notice footer messages from commit descriptions (`7e8dc13`_) * **parser-emoji**: Remove issue footer messages from commit 
descriptions (`b757603`_) * **parser-emoji**: Remove PR/MR references from commit subject line (`16465f1`_) * **parser-emoji**: Remove release notice footer messages from commit descriptions (`b6307cb`_) * **parser-scipy**: Remove issue footer messages from commit descriptions (`3cfee76`_) * **parser-scipy**: Remove PR/MR references from commit subject line (`da4140f`_) * **parser-scipy**: Remove release notice footer messages from commit descriptions (`58308e3`_) 📖 Documentation ---------------- * Refactor documentation page navigation (`4e52f4b`_) * **algorithm**: Remove out-of-date algorithm description (`6cd0fbe`_) * **commit-parsing**: Define limitation of revert commits with the scipy parser (`5310d0c`_) * **configuration**: Change default value for ``allow_zero_version`` in the description (`203d29d`_) * **configuration**: Change the default for the base changelog's ``mask_initial_release`` value (`5fb02ab`_) * **configuration**: Change the default value for ``changelog.mode`` in the setting description (`0bed906`_) * **configuration**: Update ``version_variables`` section to include double-equals operand support (`PR#1244`_, `080e4bc`_) * **contributing**: Refactor contributing & contributors layout (`8bed5bc`_) * **github-actions**: Add reference to manual release workflow example (`6aad7f1`_) * **github-actions**: Change recommended workflow to separate release from deploy (`67b2ae0`_) * **github-actions**: Update ``python-semantic-release/publish-action`` parameter notes (`c4d45ec`_) * **github-actions**: Update PSR action parameter documentation (`a082896`_) * **upgrading**: Re-locate version upgrade guides into ``Upgrading PSR`` (`a5f5e04`_) * **upgrading-v10**: Added migration guide for v9 to v10 (`4ea92ec`_) âš™ï¸ Build System ---------------- * **deps**: Prevent update to ``click@8.2.0`` (`PR#1245`_, `4aa6a6e`_) â™»ï¸ Refactoring --------------- * **config**: Change ``allow_zero_version`` default to ``false`` (`c6b6eab`_) * **config**: Change 
``changelog.default_templates.mask_initial_release`` default to ``true`` (`0e114c3`_) * **config**: Change ``changelog.mode`` default to ``update`` (`7d39e76`_) 💥 Breaking Changes ------------------- .. seealso:: *For a summarized walkthrough, check out our* |v10 migration guide|_ *as well.* .. _v10 migration guide: ../upgrading/10-upgrade.html .. |v10 migration guide| replace:: *v10 migration guide* * **changelog-md**: The default Markdown changelog template and release notes template will no longer print out the entire commit message contents, instead, it will only print the commit subject line. This comes to meet the high demand of better formatted changelogs and requests for subject line only. Originally, it was a decision to not hide commit subjects that were included in the commit body via the ``git merge --squash`` command and PSR did not have another alternative. At this point, all the built-in parsers have the ability to parse squashed commits and separate them out into their own entry on the changelog. Therefore, the default template no longer needs to write out the full commit body. See the commit parser options if you want to enable/disable parsing squash commits. * **changelog-rst**: The default ReStructured changelog template will no longer print out the entire commit message contents, instead, it will only print the commit subject line. This comes to meet the high demand of better formatted changelogs and requests for subject line only. Originally, it was a decision to not hide commit subjects that were included in the commit body via the ``git merge --squash`` command and PSR did not have another alternative. At this point, all the built-in parsers have the ability to parse squashed commits and separate them out into their own entry on the changelog. Therefore, the default template no longer needs to write out the full commit body. See the commit parser options if you want to enable/disable parsing squash commits. 
* **config**: This release switches the ``allow_zero_version`` default to ``false``. This change is to encourage less ``0.x`` releases as the default but rather allow the experienced developer to choose when ``0.x`` is appropriate. There are way too many projects in the ecosystems that never leave ``0.x`` and that is problematic for the industry tools that help auto-update based on SemVer. We should strive for publishing usable tools and maintaining good forethought for when compatibility must break. If your configuration already sets the ``allow_zero_version`` value, this change will have no effect on your project. If you want to use ``0.x`` versions, from the start then change ``allow_zero_version`` to ``true`` in your configuration. * **config**: This release switches the ``changelog.default_templates.mask_initial_release`` default to ``true``. This change is intended to toggle better recommended outputs of the default changelog. Conceptually, the very first release is hard to describe--one can only provide new features as nothing exists yet for the end user. No changelog should be written as there is no start point to compare the "changes" to. The recommendation instead is to only list a simple message as ``Initial Release``. This is now the default for PSR when providing the very first release (no pre-existing tags) in the changelog and release notes. If your configuration already sets the ``changelog.default_templates.mask_initial_release`` value, then this change will have no effect on your project. If you do NOT want to mask the first release information, then set ``changelog.default_templates.mask_initial_release`` to ``false`` in your configuration. * **config**: This release switches the ``changelog.mode`` default to ``update``. In this mode, if a changelog exists, PSR will update the changelog **IF AND ONLY IF** the configured insertion flag exists in the changelog. The Changelog output will remain unchanged if no insertion flag exists. 
The insertion flag may be configured with the ``changelog.insertion_flag`` setting. When upgrading to ``v10``, you must add the insertion flag manually or you can just delete the changelog file and run PSR's changelog generation and it will rebuild the changelog (similar to init mode) but it will add the insertion flag. If your configuration already sets the ``changelog.mode`` value, then this change will have no effect on your project. If you would rather the changelog be generated from scratch every release, than set the ``changelog.mode`` value to ``init`` in your configuration. * **github-action**: The ``root_options`` action input parameter has been removed because it created a command injection vulnerability for arbitrary code to execute within the container context of the GitHub action if a command injection code was provided as part of the ``root_options`` parameter string. To eliminate the vulnerability, each relevant option that can be provided to ``semantic-release`` has been individually added as its own parameter and will be processed individually to prevent command injection. Please review our `Github Actions Configuration`__ page to review the newly available configuration options that replace the ``root_options`` parameter. __ https://github.com/python-semantic-release/python-semantic-release/blob/v10.0.0/docs/configuration/automatic-releases/github-actions.rst * **parser-conventional**: Any breaking change footer messages that the conventional commit parser detects will now be removed from the ``commit.descriptions[]`` list but maintained in and only in the ``commit.breaking_descriptions[]`` list. Previously, the descriptions included all text from the commit message but that was redundant as the default changelog now handles breaking change footers in its own section. * **parser-conventional, parser-emoji, parser-scipy**: Any issue resolution footers that the parser detects will now be removed from the ``commit.descriptions[]`` list. 
Previously, the descriptions included all text from the commit message but now that the parser pulls out the issue numbers the numbers will be included in the ``commit.linked_issues`` tuple for user extraction in any changelog generation. * **parser-conventional, parser-emoji, parser-scipy**: Any release notice footer messages that the commit parser detects will now be removed from the ``commit.descriptions[]`` list but maintained in and only in the ``commit.notices[]`` list. Previously, the descriptions included all text from the commit message but that was redundant as the default changelog now handles release notice footers in its own section. * **parser-conventional, parser-emoji, parser-scipy**: Generally, a pull request or merge request number reference is included in the subject line at the end within parentheses on some common VCS's like GitHub. PSR now looks for this reference and extracts it into the ``commit.linked_merge_request`` and the ``commit.linked_pull_request`` attributes of a commit object. Since this is now pulled out individually, it is cleaner to remove this from the first line of the ``commit.descriptions`` list (ie. the subject line) so that changelog macros do not have to replace the text but instead only append a PR/MR link to the end of the line. The reference does maintain the PR/MR prefix indicator (`#` or ``!``). * **parser-conventional, parser-emoji, parser-scipy**: The configuration setting ``commit_parser_options.ignore_merge_commits`` is now set to ``true`` by default. The feature to ignore squash commits was introduced in ``v9.18.0`` and was originally set to ``false`` to prevent unexpected results on a non-breaking update. The ignore merge commits feature prevents additional unnecessary processing on a commit message that likely will not match a commit message syntax. Most merge commits are syntactically pre-defined by Git or Remote Version Control System (ex. GitHub, etc.) 
and do not follow a commit convention (nor should they). The larger issue with merge commits is that they ultimately are a full copy of all the changes that were previously created and committed. The merge commit itself ensures that the previous commit tree is maintained in history, therefore the commit message always exists. If merge commits are parsed, it generally creates duplicate messages that will end up in your changelog, which is less than desired in most cases. If you have previously used the ``changelog.exclude_commit_patterns`` functionality to ignore merge commit messages then you will want this setting set to ``true`` to improve parsing speed. You can also now remove the merge commit exclude pattern from the list as well to improve parsing speed. If this functionality is not desired, you will need to update your configuration to change the new setting to ``false``. * **parser-conventional, parser-emoji, parser-scipy**: The configuration setting ``commit_parser_options.parse_squash_commits`` is now set to ``true`` by default. The feature to parse squash commits was introduced in ``v9.17.0`` and was originally set to ``false`` to prevent unexpected results on a non-breaking update. The parse squash commits feature attempts to find additional commits of the same commit type within the body of a single commit message. When squash commits are found, Python Semantic Release will separate out each commit into its own artificial commit object and parse them individually. This potentially can change the resulting version bump if a larger bump was detected within the squashed components. It also allows for the changelog and release notes to separately order and display each commit as originally written. If this is not desired, you will need to update your configuration to change the new setting to ``false``. .. _#733: https://github.com/python-semantic-release/python-semantic-release/issues/733 .. 
_080e4bc: https://github.com/python-semantic-release/python-semantic-release/commit/080e4bcb14048a2dd10445546a7ee3159b3ab85c .. _0bed906: https://github.com/python-semantic-release/python-semantic-release/commit/0bed9069df67ae806ad0a15f8434ac4efcc6ba31 .. _0e114c3: https://github.com/python-semantic-release/python-semantic-release/commit/0e114c3458a24b87bfd2d6b0cd3f5cfdc9497084 .. _16465f1: https://github.com/python-semantic-release/python-semantic-release/commit/16465f133386b09627d311727a6f8d24dd8f174f .. _203d29d: https://github.com/python-semantic-release/python-semantic-release/commit/203d29d9d6b8e862eabe2f99dbd27eabf04e75e2 .. _3cfee76: https://github.com/python-semantic-release/python-semantic-release/commit/3cfee76032662bda6fbdd7e2585193213e4f9da2 .. _4aa6a6e: https://github.com/python-semantic-release/python-semantic-release/commit/4aa6a6edbff75889e09f32f7cba52cb90c9fb626 .. _4e52f4b: https://github.com/python-semantic-release/python-semantic-release/commit/4e52f4bba46e96a4762f97d306f15ae52c5cea1b .. _4ea92ec: https://github.com/python-semantic-release/python-semantic-release/commit/4ea92ec34dcd45d8cbab24e38e55289617b2d728 .. _514a922: https://github.com/python-semantic-release/python-semantic-release/commit/514a922fa87721e2500062dcae841bedd84dc1fe .. _5310d0c: https://github.com/python-semantic-release/python-semantic-release/commit/5310d0c700840538f27874394b9964bf09cd69b1 .. _58308e3: https://github.com/python-semantic-release/python-semantic-release/commit/58308e31bb6306aac3a985af01eb779dc923d3f0 .. _59bf084: https://github.com/python-semantic-release/python-semantic-release/commit/59bf08440a15269afaac81d78dd03ee418f9fd6b .. _5fb02ab: https://github.com/python-semantic-release/python-semantic-release/commit/5fb02ab6e3b8278ecbf92ed35083ffb595bc19b8 .. _634fffe: https://github.com/python-semantic-release/python-semantic-release/commit/634fffea29157e9b6305b21802c78ac245454265 .. 
_67b2ae0: https://github.com/python-semantic-release/python-semantic-release/commit/67b2ae0050cce540a4126fe280cca6dc4bcf5d3f .. _6aad7f1: https://github.com/python-semantic-release/python-semantic-release/commit/6aad7f17e64fb4717ddd7a9e94d2a730be6a3bd9 .. _6cd0fbe: https://github.com/python-semantic-release/python-semantic-release/commit/6cd0fbeb44e16d394c210216c7099afa51f5a4a3 .. _6fcdc99: https://github.com/python-semantic-release/python-semantic-release/commit/6fcdc99e9462b1186ea9488fc14e4e18f8c7fdb3 .. _731466f: https://github.com/python-semantic-release/python-semantic-release/commit/731466fec4e06fe71f6c4addd4ae2ec2182ae9c1 .. _7d39e76: https://github.com/python-semantic-release/python-semantic-release/commit/7d39e7675f859463b54751d59957b869d5d8395c .. _7e8dc13: https://github.com/python-semantic-release/python-semantic-release/commit/7e8dc13c0b048a95d01f7aecfbe4eeedcddec9a4 .. _8a51525: https://github.com/python-semantic-release/python-semantic-release/commit/8a5152573b9175f01be06d0c4531ea0ca4de8dd4 .. _8bed5bc: https://github.com/python-semantic-release/python-semantic-release/commit/8bed5bcca4a5759af0e3fb24eadf14aa4e4f53c9 .. _a082896: https://github.com/python-semantic-release/python-semantic-release/commit/a08289693085153effdafe3c6ff235a1777bb1fa .. _a5f5e04: https://github.com/python-semantic-release/python-semantic-release/commit/a5f5e042ae9af909ee9e3ddf57c78adbc92ce378 .. _b1bb0e5: https://github.com/python-semantic-release/python-semantic-release/commit/b1bb0e55910715754eebef6cb5b21ebed5ee8d68 .. _b271cbb: https://github.com/python-semantic-release/python-semantic-release/commit/b271cbb2d3e8b86d07d1358b2e7424ccff6ae186 .. _b6307cb: https://github.com/python-semantic-release/python-semantic-release/commit/b6307cb649043bbcc7ad9f15ac5ac6728914f443 .. _b757603: https://github.com/python-semantic-release/python-semantic-release/commit/b757603e77ebe26d8a14758d78fd21163a9059b2 .. 
_bd3e7bf: https://github.com/python-semantic-release/python-semantic-release/commit/bd3e7bfa86d53a03f03ac419399847712c523b02 .. _c4d45ec: https://github.com/python-semantic-release/python-semantic-release/commit/c4d45ec46dfa81f645c25ea18ffffe9635922603 .. _c6b6eab: https://github.com/python-semantic-release/python-semantic-release/commit/c6b6eabbfe100d2c741620eb3fa12a382531fa94 .. _d4f128e: https://github.com/python-semantic-release/python-semantic-release/commit/d4f128e75e33256c0163fbb475c7c41e18f65147 .. _da4140f: https://github.com/python-semantic-release/python-semantic-release/commit/da4140f3e3a2ed03c05064f35561b4584f517105 .. _e7ac155: https://github.com/python-semantic-release/python-semantic-release/commit/e7ac155a91fc2e735d3cbf9b66fb4e5ff40a1466 .. _eed63fa: https://github.com/python-semantic-release/python-semantic-release/commit/eed63fa9f6e762f55700fc85ef3ebdc0d3144f21 .. _fb3da27: https://github.com/python-semantic-release/python-semantic-release/commit/fb3da27650ff15bcdb3b7badc919bd8a9a73238d .. _PR#1244: https://github.com/python-semantic-release/python-semantic-release/pull/1244 .. _PR#1245: https://github.com/python-semantic-release/python-semantic-release/pull/1245 .. _changelog-v9.21.1: v9.21.1 (2025-05-05) ==================== 🪲 Bug Fixes ------------ * **changelog-filters**: Fixes url resolution when prefix & path share letters, closes `#1204`_ (`PR#1239`_, `f61f8a3`_) 📖 Documentation ---------------- * **github-actions**: Expound on monorepo example to include publishing actions (`PR#1229`_, `550e85f`_) âš™ï¸ Build System ---------------- * **deps**: Bump ``rich`` dependency from ``13.0`` to ``14.0`` (`PR#1224`_, `691536e`_) * **deps**: Expand ``python-gitlab`` dependency to include ``v5.0.0`` (`PR#1228`_, `a0cd1be`_) .. _#1204: https://github.com/python-semantic-release/python-semantic-release/issues/1204 .. _550e85f: https://github.com/python-semantic-release/python-semantic-release/commit/550e85f5ec2695d5aa680014127846d58c680e31 .. 
_691536e: https://github.com/python-semantic-release/python-semantic-release/commit/691536e98f311d0fc6d29a72c41ce5a65f1f4b6c .. _a0cd1be: https://github.com/python-semantic-release/python-semantic-release/commit/a0cd1be4e3aa283cbdc544785e5f895c8391dfb8 .. _f61f8a3: https://github.com/python-semantic-release/python-semantic-release/commit/f61f8a38a1a3f44a7a56cf9dcb7dde748f90ca1e .. _PR#1224: https://github.com/python-semantic-release/python-semantic-release/pull/1224 .. _PR#1228: https://github.com/python-semantic-release/python-semantic-release/pull/1228 .. _PR#1229: https://github.com/python-semantic-release/python-semantic-release/pull/1229 .. _PR#1239: https://github.com/python-semantic-release/python-semantic-release/pull/1239 .. _changelog-v9.21.0: v9.21.0 (2025-02-23) ==================== ✨ Features ----------- * Add package name variant, ``python-semantic-release``, project script, closes `#1195`_ (`PR#1199`_, `1ac97bc`_) 📖 Documentation ---------------- * **github-actions**: Update example workflow to handle rapid merges (`PR#1200`_, `1a4116a`_) .. _#1195: https://github.com/python-semantic-release/python-semantic-release/issues/1195 .. _1a4116a: https://github.com/python-semantic-release/python-semantic-release/commit/1a4116af4b999144998cf94cf84c9c23ff2e352f .. _1ac97bc: https://github.com/python-semantic-release/python-semantic-release/commit/1ac97bc74c69ce61cec98242c19bf8adc1d37fb9 .. _PR#1199: https://github.com/python-semantic-release/python-semantic-release/pull/1199 .. _PR#1200: https://github.com/python-semantic-release/python-semantic-release/pull/1200 .. 
_changelog-v9.20.0: v9.20.0 (2025-02-17) ==================== ✨ Features ----------- * **cmd-version**: Enable stamping of tag formatted versions into files, closes `#846`_ (`PR#1190`_, `8906d8e`_) * **cmd-version**: Extend ``version_variables`` to stamp versions with ``@`` symbol separator, closes `#1156`_ (`PR#1185`_, `23f69b6`_) 📖 Documentation ---------------- * **configuration**: Add usage information for tag format version stamping (`PR#1190`_, `8906d8e`_) * **configuration**: Clarify ``version_variables`` config description & ``@`` separator usage (`PR#1185`_, `23f69b6`_) âš™ï¸ Build System ---------------- * **deps**: Add ``deprecated~=1.2`` for deprecation notices & sphinx documentation (`PR#1190`_, `8906d8e`_) .. _#1156: https://github.com/python-semantic-release/python-semantic-release/issues/1156 .. _#846: https://github.com/python-semantic-release/python-semantic-release/issues/846 .. _23f69b6: https://github.com/python-semantic-release/python-semantic-release/commit/23f69b6ac206d111b1e566367f9b2f033df5c87a .. _8906d8e: https://github.com/python-semantic-release/python-semantic-release/commit/8906d8e70467af1489d797ec8cb09b1f95e5d409 .. _PR#1185: https://github.com/python-semantic-release/python-semantic-release/pull/1185 .. _PR#1190: https://github.com/python-semantic-release/python-semantic-release/pull/1190 .. 
_changelog-v9.19.1: v9.19.1 (2025-02-11) ==================== 🪲 Bug Fixes ------------ * **changelog**: Standardize heading format for across all version sections (`PR#1182`_, `81f9e80`_) * **changelog-md**: Standardize heading format for extra release information (`PR#1182`_, `81f9e80`_) * **changelog-rst**: Standardize heading format for extra release information (`PR#1182`_, `81f9e80`_) * **config**: Handle invalid ``commit_parser`` type gracefully (`PR#1180`_, `903c8ba`_) * **release-notes**: Standardize heading format for extra release information (`PR#1182`_, `81f9e80`_) 📖 Documentation ---------------- * Fix spelling errors & inaccurate descriptions (`55d4a05`_) * **automatic-releases**: Declutter the table of contents for automatic release guides (`e8343ee`_) * **commit-parsing**: Update reference to section name of additional release info (`PR#1182`_, `81f9e80`_) .. _55d4a05: https://github.com/python-semantic-release/python-semantic-release/commit/55d4a05ff56321cf9874f8f302fbe7e5163ad4f7 .. _81f9e80: https://github.com/python-semantic-release/python-semantic-release/commit/81f9e80c3df185ef5e553e024b903ce153e14304 .. _903c8ba: https://github.com/python-semantic-release/python-semantic-release/commit/903c8ba68d797f7cd9e5025c9a3a3ad471c805ae .. _e8343ee: https://github.com/python-semantic-release/python-semantic-release/commit/e8343eeb38d3b4e18953ac0f97538df396d22b76 .. _PR#1180: https://github.com/python-semantic-release/python-semantic-release/pull/1180 .. _PR#1182: https://github.com/python-semantic-release/python-semantic-release/pull/1182 .. 
_changelog-v9.19.0: v9.19.0 (2025-02-10) ==================== ✨ Features ----------- * **parser-conventional**: Add official ``conventional-commits`` parser (`PR#1177`_, `27ddf84`_) 📖 Documentation ---------------- * Update references to Angular parser to Conventional Commit Parser (`PR#1177`_, `27ddf84`_) 💡 Additional Release Information --------------------------------- * **parser-conventional**: The 'angular' commit parser has been renamed to 'conventional' to match the official conventional-commits standard for which the 'angular' parser has evolved into. Please update your configurations to specify 'conventional' as the 'commit_parser' value in place of 'angular'. The 'angular' type will be removed in v11. .. _27ddf84: https://github.com/python-semantic-release/python-semantic-release/commit/27ddf840f8c812361c60bac9cf0b110d401f33d6 .. _PR#1177: https://github.com/python-semantic-release/python-semantic-release/pull/1177 .. _changelog-v9.18.1: v9.18.1 (2025-02-08) ==================== 🪲 Bug Fixes ------------ * **config**: Refactors default token resolution to prevent pre-mature insecure URL error, closes `#1074`_, `#1169`_ (`PR#1173`_, `37db258`_) .. _#1074: https://github.com/python-semantic-release/python-semantic-release/issues/1074 .. _#1169: https://github.com/python-semantic-release/python-semantic-release/issues/1169 .. _37db258: https://github.com/python-semantic-release/python-semantic-release/commit/37db2581620ad02e66716a4b3b365aa28abe65f8 .. _PR#1173: https://github.com/python-semantic-release/python-semantic-release/pull/1173 .. 
_changelog-v9.18.0: v9.18.0 (2025-02-06) ==================== ✨ Features ----------- * Add ``create_release_url`` & ``format_w_official_vcs_name`` filters (`PR#1161`_, `f853cf0`_) * **changelog**: Add ``create_pypi_url`` filter to jinja template render context (`PR#1160`_, `45d49c3`_) * **changelog**: Add additional release info to changeling from commit ``NOTICE``'s (`PR#1166`_, `834ce32`_) * **changelog-md**: Add additional release info section to default markdown template, closes `#223`_ (`PR#1166`_, `834ce32`_) * **changelog-rst**: Add additional release info section to default ReStructuredText template, closes `#223`_ (`PR#1166`_, `834ce32`_) * **commit-parser**: Enable parsers to identify additional release notices from commit msgs (`PR#1166`_, `834ce32`_) * **parser-angular**: Add a ``ignore_merge_commits`` option to discard parsing merge commits (`PR#1164`_, `463e43b`_) * **parser-angular**: Add functionality to parse out ``NOTICE:`` prefixed statements in commits, closes `#223`_ (`PR#1166`_, `834ce32`_) * **parser-emoji**: Add a ``ignore_merge_commits`` option to discard parsing merge commits (`PR#1164`_, `463e43b`_) * **parser-emoji**: Add functionality to parse out ``NOTICE:`` prefixed statements in commits, closes `#223`_ (`PR#1166`_, `834ce32`_) * **parsers**: Add option ``ignore_merge_commits`` to discard parsing merge commits (`PR#1164`_, `463e43b`_) * **release-notes**: Add license information to default release notes template, closes `#228`_ (`PR#1167`_, `41172c1`_) * **vcs-bitbucket**: Add ``format_w_official_vcs_name`` filter function (`PR#1161`_, `f853cf0`_) * **vcs-gitea**: Add ``create_release_url`` & ``format_w_official_vcs_name`` filter functions (`PR#1161`_, `f853cf0`_) * **vcs-github**: Add ``create_release_url`` & ``format_w_official_vcs_name`` filter functions (`PR#1161`_, `f853cf0`_) * **vcs-gitlab**: Add ``create_release_url`` & ``format_w_official_vcs_name`` filter functions (`PR#1161`_, `f853cf0`_) 🪲 Bug Fixes ------------ * Refactor 
parsing compatibility function to support older custom parsers (`PR#1165`_, `cf340c5`_) * **changelog**: Fix parsing compatibility w/ custom parsers, closes `#1162`_ (`PR#1165`_, `cf340c5`_) * **changelog-templates**: Adjust default templates to avoid empty version sections (`PR#1164`_, `463e43b`_) * **parser-angular**: Adjust parser to prevent empty message extractions (`PR#1166`_, `834ce32`_) * **parser-emoji**: Adjust parser to prevent empty message extractions (`PR#1166`_, `834ce32`_) * **version**: Fix parsing compatibility w/ custom parsers, closes `#1162`_ (`PR#1165`_, `cf340c5`_) 📖 Documentation ---------------- * **changelog**: Add formatted changelog into hosted documentation (`PR#1155`_, `2f18a6d`_) * **changelog-templates**: Add description for new ``create_pypi_url`` filter function (`PR#1160`_, `45d49c3`_) * **changelog-templates**: Add details about license specification in the release notes (`PR#1167`_, `41172c1`_) * **changelog-templates**: Define ``create_release_url`` & ``format_w_official_vcs_name`` filters (`PR#1161`_, `f853cf0`_) * **changelog-templates**: Document special separate sections of commit descriptions (`ebb4c67`_) * **commit-parsing**: Document new release notice footer detection feature of built-in parsers (`cd14e92`_) .. _#1162: https://github.com/python-semantic-release/python-semantic-release/issues/1162 .. _#223: https://github.com/python-semantic-release/python-semantic-release/issues/223 .. _#228: https://github.com/python-semantic-release/python-semantic-release/issues/228 .. _2f18a6d: https://github.com/python-semantic-release/python-semantic-release/commit/2f18a6debfa6ef3afcc5611a3e09262998f2d4bf .. _41172c1: https://github.com/python-semantic-release/python-semantic-release/commit/41172c1272a402e94e3c68571d013cbdcb5b9023 .. _45d49c3: https://github.com/python-semantic-release/python-semantic-release/commit/45d49c3da75a7f08c86fc9bab5d232a9b37d9e72 .. 
_463e43b: https://github.com/python-semantic-release/python-semantic-release/commit/463e43b897ee80dfaf7ce9d88d22ea8e652bcf55 .. _834ce32: https://github.com/python-semantic-release/python-semantic-release/commit/834ce323007c58229abf115ef2016a348de9ee66 .. _cd14e92: https://github.com/python-semantic-release/python-semantic-release/commit/cd14e9209d4e54f0876e737d1f802dded294a48c .. _cf340c5: https://github.com/python-semantic-release/python-semantic-release/commit/cf340c5256dea58aedad71a6bdf50b17eee53d2f .. _ebb4c67: https://github.com/python-semantic-release/python-semantic-release/commit/ebb4c67d46b86fdf79e32edf744a2ec2b09d6a93 .. _f853cf0: https://github.com/python-semantic-release/python-semantic-release/commit/f853cf059b3323d7888b06fde09142184e7964e8 .. _PR#1155: https://github.com/python-semantic-release/python-semantic-release/pull/1155 .. _PR#1160: https://github.com/python-semantic-release/python-semantic-release/pull/1160 .. _PR#1161: https://github.com/python-semantic-release/python-semantic-release/pull/1161 .. _PR#1164: https://github.com/python-semantic-release/python-semantic-release/pull/1164 .. _PR#1165: https://github.com/python-semantic-release/python-semantic-release/pull/1165 .. _PR#1166: https://github.com/python-semantic-release/python-semantic-release/pull/1166 .. _PR#1167: https://github.com/python-semantic-release/python-semantic-release/pull/1167 .. 
_changelog-v9.17.0: v9.17.0 (2025-01-26) ==================== ✨ Features ----------- * **changelog**: Add ``sort_numerically`` filter function to template environment (`PR#1146`_, `7792388`_) * **changelog**: Parse squashed commits individually (`PR#1112`_, `cf785ca`_) * **config**: Extend support of remote urls aliased using git ``insteadOf`` configurations, closes `#1150`_ (`PR#1151`_, `4045037`_) * **parsers**: Parse squashed commits individually (`PR#1112`_, `cf785ca`_) * **parser-angular**: Apply PR/MR numbers to all parsed commits from a squash merge (`PR#1112`_, `cf785ca`_) * **parser-angular**: Upgrade angular parser to parse squashed commits individually, closes `#1085`_ (`PR#1112`_, `cf785ca`_) * **parser-emoji**: Add functionality to interpret scopes from gitmoji commit messages (`PR#1112`_, `cf785ca`_) * **parser-emoji**: Upgrade emoji parser to parse squashed commits individually (`PR#1112`_, `cf785ca`_) * **version**: Parse squashed commits individually (`PR#1112`_, `cf785ca`_) 🪲 Bug Fixes ------------ * **github-action**: Disable writing python bytecode in action execution (`PR#1152`_, `315ae21`_) âš¡ Performance Improvements --------------------------- * **logging**: Remove irrelevant debug logging statements (`PR#1147`_, `f1ef4ec`_) 📖 Documentation ---------------- * **changelog-templates**: Add description for new ``sort_numerically`` filter function (`PR#1146`_, `7792388`_) * **commit-parsing**: Add description for squash commit evaluation option of default parsers (`PR#1112`_, `cf785ca`_) * **configuration**: Update the ``commit_parser_options`` setting description (`PR#1112`_, `cf785ca`_) .. _#1085: https://github.com/python-semantic-release/python-semantic-release/issues/1085 .. _#1150: https://github.com/python-semantic-release/python-semantic-release/issues/1150 .. _315ae21: https://github.com/python-semantic-release/python-semantic-release/commit/315ae2176e211b00b13374560d81e127a3065d1a .. 
_4045037: https://github.com/python-semantic-release/python-semantic-release/commit/40450375c7951dafddb09bef8001db7180d95f3a .. _7792388: https://github.com/python-semantic-release/python-semantic-release/commit/77923885c585171e8888aacde989837ecbabf3fc .. _cf785ca: https://github.com/python-semantic-release/python-semantic-release/commit/cf785ca79a49eb4ee95c148e0ae6a19e230e915c .. _f1ef4ec: https://github.com/python-semantic-release/python-semantic-release/commit/f1ef4ecf5f22684a870b958f87d1ca2650e612db .. _PR#1112: https://github.com/python-semantic-release/python-semantic-release/pull/1112 .. _PR#1146: https://github.com/python-semantic-release/python-semantic-release/pull/1146 .. _PR#1147: https://github.com/python-semantic-release/python-semantic-release/pull/1147 .. _PR#1151: https://github.com/python-semantic-release/python-semantic-release/pull/1151 .. _PR#1152: https://github.com/python-semantic-release/python-semantic-release/pull/1152 .. _changelog-v9.16.1: v9.16.1 (2025-01-12) ==================== 🪲 Bug Fixes ------------ * **parser-custom**: Handle relative parent directory paths to module file better (`PR#1142`_, `c4056fc`_) 📖 Documentation ---------------- * **github-actions**: Update PSR versions in github workflow examples (`PR#1140`_, `9bdd626`_) .. _9bdd626: https://github.com/python-semantic-release/python-semantic-release/commit/9bdd626bf8f8359d35725cebe803931063260cac .. _c4056fc: https://github.com/python-semantic-release/python-semantic-release/commit/c4056fc2e1fb3bddb78728793716ac6fb8522b1a .. _PR#1140: https://github.com/python-semantic-release/python-semantic-release/pull/1140 .. _PR#1142: https://github.com/python-semantic-release/python-semantic-release/pull/1142 .. 
_changelog-v9.16.0: v9.16.0 (2025-01-12) ==================== ✨ Features ----------- * **config**: Expand dynamic parser import to handle a filepath to module (`PR#1135`_, `0418fd8`_) 🪲 Bug Fixes ------------ * **changelog**: Fixes PSR release commit exclusions for customized commit messages (`PR#1139`_, `f9a2078`_) * **cmd-version**: Fixes ``--print-tag`` result to match configured tag format (`PR#1134`_, `a990aa7`_) * **cmd-version**: Fixes tag format on default version when force bump for initial release, closes `#1137`_ (`PR#1138`_, `007fd00`_) * **config-changelog**: Validate ``changelog.exclude_commit_patterns`` on config load (`PR#1139`_, `f9a2078`_) 📖 Documentation ---------------- * **commit-parsing**: Add the new custom parser import spec description for direct path imports, closes `#687`_ (`PR#1135`_, `0418fd8`_) * **configuration**: Adjust ``commit_parser`` option definition for direct path imports (`PR#1135`_, `0418fd8`_) .. _#687: https://github.com/python-semantic-release/python-semantic-release/issues/687 .. _#1137: https://github.com/python-semantic-release/python-semantic-release/issues/1137 .. _007fd00: https://github.com/python-semantic-release/python-semantic-release/commit/007fd00a3945ed211ece4baab0b79ad93dc018f5 .. _0418fd8: https://github.com/python-semantic-release/python-semantic-release/commit/0418fd8d27aac14925aafa50912e751e3aeff2f7 .. _a990aa7: https://github.com/python-semantic-release/python-semantic-release/commit/a990aa7ab0a9d52d295c04d54d20e9c9f2db2ca5 .. _f9a2078: https://github.com/python-semantic-release/python-semantic-release/commit/f9a20787437d0f26074fe2121bf0a29576a96df0 .. _PR#1134: https://github.com/python-semantic-release/python-semantic-release/pull/1134 .. _PR#1135: https://github.com/python-semantic-release/python-semantic-release/pull/1135 .. _PR#1138: https://github.com/python-semantic-release/python-semantic-release/pull/1138 .. 
_PR#1139: https://github.com/python-semantic-release/python-semantic-release/pull/1139 .. _changelog-v9.15.2: v9.15.2 (2024-12-16) ==================== 🪲 Bug Fixes ------------ * **changelog**: Ensures user rendered files are trimmed to end with a single newline (`PR#1118`_, `6dfbbb0`_) * **cli**: Add error message of how to gather full error output (`PR#1116`_, `ba85532`_) * **cmd-version**: Enable maintenance prereleases (`PR#864`_, `b88108e`_) * **cmd-version**: Fix handling of multiple prerelease token variants & git flow merges (`PR#1120`_, `8784b9a`_) * **cmd-version**: Fix version determination algorithm to capture commits across merged branches (`PR#1120`_, `8784b9a`_) * **cmd-version**: Forces tag timestamp to be same time as release commit (`PR#1117`_, `7898b11`_) * **cmd-version**: Handle multiple prerelease token variants properly, closes `#789`_ (`PR#1120`_, `8784b9a`_) * **config**: Ensure default config loads on network mounted windows environments, closes `#1123`_ (`PR#1124`_, `a64cbc9`_) * **version**: Remove some excessive log msgs from debug to silly level (`PR#1120`_, `8784b9a`_) * **version-bump**: Increment based on current commit's history only, closes `#861`_ (`PR#864`_, `b88108e`_) âš¡ Performance Improvements --------------------------- * **cmd-version**: Refactor version determination algorithm for accuracy & speed (`PR#1120`_, `8784b9a`_) .. _#789: https://github.com/python-semantic-release/python-semantic-release/issues/789 .. _#861: https://github.com/python-semantic-release/python-semantic-release/issues/861 .. _#1123: https://github.com/python-semantic-release/python-semantic-release/issues/1123 .. _6dfbbb0: https://github.com/python-semantic-release/python-semantic-release/commit/6dfbbb0371aef6b125cbcbf89b80dc343ed97360 .. _7898b11: https://github.com/python-semantic-release/python-semantic-release/commit/7898b1185fc1ad10e96bf3f5e48d9473b45d2b51 .. 
_8784b9a: https://github.com/python-semantic-release/python-semantic-release/commit/8784b9ad4bc59384f855b5af8f1b8fb294397595 .. _a64cbc9: https://github.com/python-semantic-release/python-semantic-release/commit/a64cbc96c110e32f1ec5d1a7b61e950472491b87 .. _b88108e: https://github.com/python-semantic-release/python-semantic-release/commit/b88108e189e1894e36ae4fdf8ad8a382b5c8c90a .. _ba85532: https://github.com/python-semantic-release/python-semantic-release/commit/ba85532ddd6fcf1a2205f7ce0b88ea5be76cb621 .. _PR#864: https://github.com/python-semantic-release/python-semantic-release/pull/864 .. _PR#1116: https://github.com/python-semantic-release/python-semantic-release/pull/1116 .. _PR#1117: https://github.com/python-semantic-release/python-semantic-release/pull/1117 .. _PR#1118: https://github.com/python-semantic-release/python-semantic-release/pull/1118 .. _PR#1120: https://github.com/python-semantic-release/python-semantic-release/pull/1120 .. _PR#1124: https://github.com/python-semantic-release/python-semantic-release/pull/1124 .. _changelog-v9.15.1: v9.15.1 (2024-12-03) ==================== 🪲 Bug Fixes ------------ * **changelog-md**: Fix commit sort of breaking descriptions section (`75b342e`_) * **parser-angular**: Ensure issues are sorted by numeric value rather than text sorted (`3858add`_) * **parser-emoji**: Ensure issues are sorted by numeric value rather than text sorted (`7b8d2d9`_) .. _3858add: https://github.com/python-semantic-release/python-semantic-release/commit/3858add582fe758dc2ae967d0cd051d43418ecd0 .. _75b342e: https://github.com/python-semantic-release/python-semantic-release/commit/75b342e6259412cb82d8b7663e5ee4536d14f407 .. _7b8d2d9: https://github.com/python-semantic-release/python-semantic-release/commit/7b8d2d92e135ab46d1be477073ccccc8c576f121 .. 
_changelog-v9.15.0: v9.15.0 (2024-12-02) ==================== ✨ Features ----------- * **changelog-md**: Add a breaking changes section to default Markdown template, closes `#244`_ (`PR#1110`_, `4fde30e`_) * **changelog-md**: Alphabetize breaking change descriptions in markdown changelog template (`PR#1110`_, `4fde30e`_) * **changelog-md**: Alphabetize commit summaries & scopes in markdown changelog template (`PR#1111`_, `8327068`_) * **changelog-rst**: Add a breaking changes section to default reStructuredText template, closes `#244`_ (`PR#1110`_, `4fde30e`_) * **changelog-rst**: Alphabetize breaking change descriptions in ReStructuredText template (`PR#1110`_, `4fde30e`_) * **changelog-rst**: Alphabetize commit summaries & scopes in ReStructuredText template (`PR#1111`_, `8327068`_) * **commit-parser**: Enable parsers to flag commit to be ignored for changelog, closes `#778`_ (`PR#1108`_, `0cc668c`_) * **default-changelog**: Add a separate formatted breaking changes section, closes `#244`_ (`PR#1110`_, `4fde30e`_) * **default-changelog**: Alphabetize commit summaries & scopes in change sections (`PR#1111`_, `8327068`_) * **parsers**: Add ``other_allowed_tags`` option for commit parser options (`PR#1109`_, `f90b8dc`_) * **parsers**: Enable parsers to identify linked issues on a commit (`PR#1109`_, `f90b8dc`_) * **parser-angular**: Automatically parse angular issue footers from commit messages (`PR#1109`_, `f90b8dc`_) * **parser-custom**: Enable custom parsers to identify linked issues on a commit (`PR#1109`_, `f90b8dc`_) * **parser-emoji**: Parse issue reference footers from commit messages (`PR#1109`_, `f90b8dc`_) * **release-notes**: Add tag comparison link to release notes when supported (`PR#1107`_, `9073344`_) 🪲 Bug Fixes ------------ * **cmd-version**: Ensure release utilizes a timezone aware datetime (`ca817ed`_) * **default-changelog**: Alphabetically sort commit descriptions in version type sections (`bdaaf5a`_) * **util**: Prevent git footers from being 
collapsed during parse (`PR#1109`_, `f90b8dc`_) 📖 Documentation ---------------- * **api-parsers**: Add option documentation to parser options (`PR#1109`_, `f90b8dc`_) * **changelog-templates**: Update examples using new ``commit.linked_issues`` attribute (`PR#1109`_, `f90b8dc`_) * **commit-parsing**: Improve & expand commit parsing w/ parser descriptions (`PR#1109`_, `f90b8dc`_) .. _#244: https://github.com/python-semantic-release/python-semantic-release/issues/244 .. _#778: https://github.com/python-semantic-release/python-semantic-release/issues/778 .. _0cc668c: https://github.com/python-semantic-release/python-semantic-release/commit/0cc668c36490401dff26bb2c3141f6120a2c47d0 .. _4fde30e: https://github.com/python-semantic-release/python-semantic-release/commit/4fde30e0936ecd186e448f1caf18d9ba377c55ad .. _8327068: https://github.com/python-semantic-release/python-semantic-release/commit/83270683fd02b626ed32179d94fa1e3c7175d113 .. _9073344: https://github.com/python-semantic-release/python-semantic-release/commit/9073344164294360843ef5522e7e4c529985984d .. _bdaaf5a: https://github.com/python-semantic-release/python-semantic-release/commit/bdaaf5a460ca77edc40070ee799430122132dc45 .. _ca817ed: https://github.com/python-semantic-release/python-semantic-release/commit/ca817ed9024cf84b306a047675534cc36dc116b2 .. _f90b8dc: https://github.com/python-semantic-release/python-semantic-release/commit/f90b8dc6ce9f112ef2c98539d155f9de24398301 .. _PR#1107: https://github.com/python-semantic-release/python-semantic-release/pull/1107 .. _PR#1108: https://github.com/python-semantic-release/python-semantic-release/pull/1108 .. _PR#1109: https://github.com/python-semantic-release/python-semantic-release/pull/1109 .. _PR#1110: https://github.com/python-semantic-release/python-semantic-release/pull/1110 .. _PR#1111: https://github.com/python-semantic-release/python-semantic-release/pull/1111 .. 
_changelog-v9.14.0: v9.14.0 (2024-11-11) ==================== ✨ Features ----------- * **changelog**: Add md to rst conversion for markdown inline links (`cb2af1f`_) * **changelog**: Define first release w/o change descriptions for default MD template (`fa89dec`_) * **changelog**: Define first release w/o change descriptions for default RST template (`e30c94b`_) * **changelog**: Prefix scopes on commit descriptions in default template (`PR#1093`_, `560fd2c`_) * **changelog-md**: Add markdown inline link format macro (`c6d8211`_) * **changelog-md**: Prefix scopes on commit descriptions in Markdown changelog template (`PR#1093`_, `560fd2c`_) * **changelog-rst**: Prefix scopes on commit descriptions in ReStructuredText template (`PR#1093`_, `560fd2c`_) * **configuration**: Add ``changelog.default_templates.mask_initial_release`` option (`595a70b`_) * **context**: Add ``mask_initial_release`` setting to changelog context (`6f2ee39`_) * **release-notes**: Define first release w/o change descriptions in default template (`83167a3`_) 🪲 Bug Fixes ------------ * **release-notes**: Override default word-wrap to non-wrap for in default template (`99ab99b`_) 📖 Documentation ---------------- * **changelog-templates**: Document new ``mask_initial_release`` changelog context variable (`f294957`_) * **configuration**: Document new ``mask_initial_release`` option usage & effect (`3cabcdc`_) * **homepage**: Fix reference to new ci workflow for test status badge (`6760069`_) .. _3cabcdc: https://github.com/python-semantic-release/python-semantic-release/commit/3cabcdcd9473e008604e74cc2d304595317e921d .. _560fd2c: https://github.com/python-semantic-release/python-semantic-release/commit/560fd2c0d58c97318377cb83af899a336d24cfcc .. _595a70b: https://github.com/python-semantic-release/python-semantic-release/commit/595a70bcbc8fea1f8ccf6c5069c41c35ec4efb8d .. _6760069: https://github.com/python-semantic-release/python-semantic-release/commit/6760069e7489f50635beb5aedbbeb2cb82b7c584 .. 
_6f2ee39: https://github.com/python-semantic-release/python-semantic-release/commit/6f2ee39414b3cf75c0b67dee4db0146bbc1041bb .. _83167a3: https://github.com/python-semantic-release/python-semantic-release/commit/83167a3dcceb7db16b790e1b0efd5fc75fee8942 .. _99ab99b: https://github.com/python-semantic-release/python-semantic-release/commit/99ab99bb0ba350ca1913a2bde9696f4242278972 .. _c6d8211: https://github.com/python-semantic-release/python-semantic-release/commit/c6d8211c859442df17cb41d2ff19fdb7a81cdb76 .. _cb2af1f: https://github.com/python-semantic-release/python-semantic-release/commit/cb2af1f17cf6c8ae037c6cd8bb8b4d9c019bb47e .. _e30c94b: https://github.com/python-semantic-release/python-semantic-release/commit/e30c94bffe62b42e8dc6ed4fed6260e57b4d532b .. _f294957: https://github.com/python-semantic-release/python-semantic-release/commit/f2949577dfb2dbf9c2ac952c1bbcc4ab84da080b .. _fa89dec: https://github.com/python-semantic-release/python-semantic-release/commit/fa89dec239efbae7544b187f624a998fa9ecc309 .. _PR#1093: https://github.com/python-semantic-release/python-semantic-release/pull/1093 .. 
_changelog-v9.13.0: v9.13.0 (2024-11-10) ==================== ✨ Features ----------- * **changelog**: Add PR/MR url linking to default Markdown changelog, closes `#924`_, `#953`_ (`cd8d131`_) * **changelog**: Add PR/MR url linking to default reStructuredText template, closes `#924`_, `#953`_ (`5f018d6`_) * **parsed-commit**: Add linked merge requests list to the ``ParsedCommit`` object (`9a91062`_) * **parser-angular**: Automatically parse PR/MR numbers from subject lines in commits (`2ac798f`_) * **parser-emoji**: Automatically parse PR/MR numbers from subject lines in commits (`bca9909`_) * **parser-scipy**: Automatically parse PR/MR numbers from subject lines in commits (`2b3f738`_) 🪲 Bug Fixes ------------ * **changelog-rst**: Ignore unknown parsed commit types in default RST changelog (`77609b1`_) * **parser-angular**: Drop the ``breaking`` category but still maintain a major level bump (`f1ffa54`_) * **parsers**: Improve reliability of descriptions after reverse word-wrap (`436374b`_) ⚡ Performance Improvements --------------------------- * **parser-angular**: Simplify commit parsing type pre-calculation (`a86a28c`_) * **parser-emoji**: Increase speed of commit parsing (`2c9c468`_) * **parser-scipy**: Increase speed & decrease complexity of commit parsing (`2b661ed`_) 📖 Documentation ---------------- * **changelog-templates**: Add ``linked_merge_request`` field to examples (`d4376bc`_) * **changelog-templates**: Fix api class reference links (`7a5bdf2`_) * **commit-parsing**: Add ``linked_merge_request`` field to Parsed Commit definition (`ca61889`_) .. _#924: https://github.com/python-semantic-release/python-semantic-release/issues/924 .. _#953: https://github.com/python-semantic-release/python-semantic-release/issues/953 .. _2ac798f: https://github.com/python-semantic-release/python-semantic-release/commit/2ac798f92e0c13c1db668747f7e35a65b99ae7ce .. 
_2b3f738: https://github.com/python-semantic-release/python-semantic-release/commit/2b3f73801f5760bac29acd93db3ffb2bc790cda0 .. _2b661ed: https://github.com/python-semantic-release/python-semantic-release/commit/2b661ed122a6f0357a6b92233ac1351c54c7794e .. _2c9c468: https://github.com/python-semantic-release/python-semantic-release/commit/2c9c4685a66feb35cd78571cf05f76344dd6d66a .. _436374b: https://github.com/python-semantic-release/python-semantic-release/commit/436374b04128d1550467ae97ba90253f1d1b3878 .. _5f018d6: https://github.com/python-semantic-release/python-semantic-release/commit/5f018d630b4c625bdf6d329b27fd966eba75b017 .. _77609b1: https://github.com/python-semantic-release/python-semantic-release/commit/77609b1917a00b106ce254e6f6d5edcd1feebba7 .. _7a5bdf2: https://github.com/python-semantic-release/python-semantic-release/commit/7a5bdf29b3df0f9a1346ea5301d2a7fee953667b .. _9a91062: https://github.com/python-semantic-release/python-semantic-release/commit/9a9106212d6c240e9d3358e139b4c4694eaf9c4b .. _a86a28c: https://github.com/python-semantic-release/python-semantic-release/commit/a86a28c5e26ed766cda71d26b9382c392e377c61 .. _bca9909: https://github.com/python-semantic-release/python-semantic-release/commit/bca9909c1b61fdb1f9ccf823fceb6951cd059820 .. _ca61889: https://github.com/python-semantic-release/python-semantic-release/commit/ca61889d4ac73e9864fbf637fb87ab2d5bc053ea .. _cd8d131: https://github.com/python-semantic-release/python-semantic-release/commit/cd8d1310a4000cc79b529fbbdc58933f4c6373c6 .. _d4376bc: https://github.com/python-semantic-release/python-semantic-release/commit/d4376bc2ae4d3708d501d91211ec3ee3a923e9b5 .. _f1ffa54: https://github.com/python-semantic-release/python-semantic-release/commit/f1ffa5411892de34cdc842fd55c460a24b6685c6 .. 
_changelog-v9.12.2: v9.12.2 (2024-11-07) ==================== 🪲 Bug Fixes ------------ * **bitbucket**: Fix ``pull_request_url`` filter to ignore an PR prefix gracefully (`PR#1089`_, `275ec88`_) * **cli**: Gracefully capture all exceptions unless in very verbose debug mode (`PR#1088`_, `13ca44f`_) * **gitea**: Fix ``issue_url`` filter to ignore an issue prefix gracefully (`PR#1089`_, `275ec88`_) * **gitea**: Fix ``pull_request_url`` filter to ignore an PR prefix gracefully (`PR#1089`_, `275ec88`_) * **github**: Fix ``issue_url`` filter to ignore an issue prefix gracefully (`PR#1089`_, `275ec88`_) * **github**: Fix ``pull_request_url`` filter to ignore an PR prefix gracefully (`PR#1089`_, `275ec88`_) * **gitlab**: Fix ``issue_url`` filter to ignore an issue prefix gracefully (`PR#1089`_, `275ec88`_) * **gitlab**: Fix ``merge_request_url`` filter to ignore an PR prefix gracefully (`PR#1089`_, `275ec88`_) * **hvcs**: Add flexibility to issue & MR/PR url jinja filters (`PR#1089`_, `275ec88`_) 📖 Documentation ---------------- * **changelog-templates**: Update descriptions of issue & MR/PR url jinja filters (`PR#1089`_, `275ec88`_) .. _13ca44f: https://github.com/python-semantic-release/python-semantic-release/commit/13ca44f4434098331f70e6937684679cf1b4106a .. _275ec88: https://github.com/python-semantic-release/python-semantic-release/commit/275ec88e6d1637c47065bb752a60017ceba9876c .. _PR#1088: https://github.com/python-semantic-release/python-semantic-release/pull/1088 .. _PR#1089: https://github.com/python-semantic-release/python-semantic-release/pull/1089 .. 
_changelog-v9.12.1: v9.12.1 (2024-11-06) ==================== 🪲 Bug Fixes ------------ * **changelog**: Fix raw-inline pattern replacement in ``convert_md_to_rst`` filter (`2dc70a6`_) * **cmd-version**: Fix ``--as-prerelease`` when no commit change from last full release (`PR#1076`_, `3b7b772`_) * **release-notes**: Add context variable shorthand ``ctx`` like docs claim & changelog has (`d618d83`_) 📖 Documentation ---------------- * **contributing**: Update local testing instructions (`74f03d4`_) .. _2dc70a6: https://github.com/python-semantic-release/python-semantic-release/commit/2dc70a6106776106b0fba474b0029071317d639f .. _3b7b772: https://github.com/python-semantic-release/python-semantic-release/commit/3b7b77246100cedd8cc8f289395f7641187ffdec .. _74f03d4: https://github.com/python-semantic-release/python-semantic-release/commit/74f03d44684b7b2d84f9f5e471425b02f8bf91c3 .. _d618d83: https://github.com/python-semantic-release/python-semantic-release/commit/d618d83360c4409fc149f70b97c5fe338fa89968 .. _PR#1076: https://github.com/python-semantic-release/python-semantic-release/pull/1076 .. _changelog-v9.12.0: v9.12.0 (2024-10-18) ==================== ✨ Features ----------- * **changelog**: Add ``autofit_text_width`` filter to template environment (`PR#1062`_, `83e4b86`_) 🪲 Bug Fixes ------------ * **changelog**: Ignore commit exclusion when a commit causes a version bump (`e8f886e`_) * **parser-angular**: Change ``Fixes`` commit type heading to ``Bug Fixes`` (`PR#1064`_, `09e3a4d`_) * **parser-emoji**: Enable the default bump level option (`bc27995`_) 📖 Documentation ---------------- * **changelog-templates**: Add definition & usage of ``autofit_text_width`` template filter (`PR#1062`_, `83e4b86`_) * **commit-parsers**: Add deprecation message for the tag parser (`af94540`_) * **configuration**: Add deprecation message for the tag parser (`a83b7e4`_) .. 
_09e3a4d: https://github.com/python-semantic-release/python-semantic-release/commit/09e3a4da6237740de8e9932d742b18d990e9d079 .. _83e4b86: https://github.com/python-semantic-release/python-semantic-release/commit/83e4b86abd4754c2f95ec2e674f04deb74b9a1e6 .. _a83b7e4: https://github.com/python-semantic-release/python-semantic-release/commit/a83b7e43e4eaa99790969a6c85f44e01cde80d0a .. _af94540: https://github.com/python-semantic-release/python-semantic-release/commit/af94540f2b1c63bf8a4dc977d5d0f66176962b64 .. _bc27995: https://github.com/python-semantic-release/python-semantic-release/commit/bc27995255a96b9d6cc743186e7c35098822a7f6 .. _e8f886e: https://github.com/python-semantic-release/python-semantic-release/commit/e8f886ef2abe8ceaea0a24a0112b92a167abd6a9 .. _PR#1062: https://github.com/python-semantic-release/python-semantic-release/pull/1062 .. _PR#1064: https://github.com/python-semantic-release/python-semantic-release/pull/1064 .. _changelog-v9.11.1: v9.11.1 (2024-10-15) ==================== 🪲 Bug Fixes ------------ * **changelog**: Prevent custom template errors when components are in hidden folders (`PR#1060`_, `a7614b0`_) .. _a7614b0: https://github.com/python-semantic-release/python-semantic-release/commit/a7614b0db8ce791e4252209e66f42b5b5275dffd .. _PR#1060: https://github.com/python-semantic-release/python-semantic-release/pull/1060 .. 
_changelog-v9.11.0: v9.11.0 (2024-10-12) ==================== ✨ Features ----------- * **changelog**: Add ``convert_md_to_rst`` filter to changelog environment (`PR#1055`_, `c2e8831`_) * **changelog**: Add default changelog in re-structured text format, closes `#399`_ (`PR#1055`_, `c2e8831`_) * **changelog**: Add default changelog template in reStructuredText format (`PR#1055`_, `c2e8831`_) * **config**: Enable default ``changelog.insertion_flag`` based on output format (`PR#1055`_, `c2e8831`_) * **config**: Enable target changelog filename to trigger RST output format, closes `#399`_ (`PR#1055`_, `c2e8831`_) 🪲 Bug Fixes ------------ * **changelog**: Correct spacing for default markdown template during updates (`PR#1055`_, `c2e8831`_) 📖 Documentation ---------------- * **changelog**: Clarify the ``convert_md_to_rst`` filter added to the template environment (`PR#1055`_, `c2e8831`_) * **changelog**: Increase detail about configuration options of default changelog creation (`PR#1055`_, `c2e8831`_) * **configuration**: Update ``changelog_file`` with deprecation notice of setting relocation (`PR#1055`_, `c2e8831`_) * **configuration**: Update ``output_format`` description for reStructuredText support (`PR#1055`_, `c2e8831`_) * **configuration**: Update details of ``insertion_flag``'s dynamic defaults with rst (`PR#1055`_, `c2e8831`_) .. _#399: https://github.com/python-semantic-release/python-semantic-release/issues/399 .. _c2e8831: https://github.com/python-semantic-release/python-semantic-release/commit/c2e883104d3c11e56f229638e988d8b571f86e34 .. _PR#1055: https://github.com/python-semantic-release/python-semantic-release/pull/1055 .. _changelog-v9.10.1: v9.10.1 (2024-10-10) ==================== 🪲 Bug Fixes ------------ * **config**: Handle branch match regex errors gracefully (`PR#1054`_, `4d12251`_) .. _4d12251: https://github.com/python-semantic-release/python-semantic-release/commit/4d12251c678a38de6b71cac5b9c1390eb9dd8ad6 .. 
_PR#1054: https://github.com/python-semantic-release/python-semantic-release/pull/1054 .. _changelog-v9.10.0: v9.10.0 (2024-10-08) ==================== ✨ Features ----------- * **changelog**: Add ``changelog_insertion_flag`` to changelog template context (`PR#1045`_, `c18c245`_) * **changelog**: Add ``changelog_mode`` to changelog template context (`PR#1045`_, `c18c245`_) * **changelog**: Add ``prev_changelog_file`` to changelog template context (`PR#1045`_, `c18c245`_) * **changelog**: Add ``read_file`` function to changelog template context (`PR#1045`_, `c18c245`_) * **changelog**: Add shorthand ``ctx`` variable to changelog template env (`PR#1045`_, `c18c245`_) * **changelog**: Modify changelog template to support changelog updates, closes `#858`_ (`PR#1045`_, `c18c245`_) * **config**: Add ``changelog.default_templates.output_format`` config option (`PR#1045`_, `c18c245`_) * **config**: Add ``changelog.insertion_flag`` as configuration option (`PR#1045`_, `c18c245`_) * **config**: Add ``changelog.mode`` as configuration option (`PR#1045`_, `c18c245`_) * **github-actions**: Add an action ``build`` directive to toggle the ``--skip-build`` option (`PR#1044`_, `26597e2`_) 🪲 Bug Fixes ------------ * **changelog**: Adjust angular heading names for readability (`PR#1045`_, `c18c245`_) * **changelog**: Ensure changelog templates can handle complex directory includes (`PR#1045`_, `c18c245`_) * **changelog**: Only render user templates when files exist (`PR#1045`_, `c18c245`_) * **config**: Prevent jinja from autoescaping markdown content by default (`PR#1045`_, `c18c245`_) 📖 Documentation ---------------- * **changelog-templates**: Improve detail & describe new ``changelog.mode="update"`` (`PR#1045`_, `c18c245`_) * **commands**: Update definition of the version commands ``--skip-build`` option (`PR#1044`_, `26597e2`_) * **configuration**: Add ``changelog.mode`` and ``changelog.insertion_flag`` config definitions (`PR#1045`_, `c18c245`_) * **configuration**: Define the 
new ``changelog.default_templates.output_format`` option (`PR#1045`_, `c18c245`_) * **configuration**: Mark version of configuration setting introduction (`PR#1045`_, `c18c245`_) * **configuration**: Standardize all true/false to lowercase ensuring toml-compatibility (`PR#1045`_, `c18c245`_) * **configuration**: Update ``changelog.environment.autoescape`` default to ``false`` to match code (`PR#1045`_, `c18c245`_) * **github-actions**: Add description of the ``build`` input directive (`PR#1044`_, `26597e2`_) * **github-actions**: Update primary example with workflow sha controlled pipeline (`14f04df`_) * **homepage**: Update custom changelog reference (`PR#1045`_, `c18c245`_) .. _#722: https://github.com/python-semantic-release/python-semantic-release/issues/722 .. _#858: https://github.com/python-semantic-release/python-semantic-release/issues/858 .. _14f04df: https://github.com/python-semantic-release/python-semantic-release/commit/14f04dffc7366142faecebb162d4449501cbf1fd .. _26597e2: https://github.com/python-semantic-release/python-semantic-release/commit/26597e24a80a37500264aa95a908ba366699099e .. _c18c245: https://github.com/python-semantic-release/python-semantic-release/commit/c18c245df51a9778af09b9dc7a315e3f11cdcda0 .. _PR#1044: https://github.com/python-semantic-release/python-semantic-release/pull/1044 .. _PR#1045: https://github.com/python-semantic-release/python-semantic-release/pull/1045 .. 
_changelog-v9.9.0: v9.9.0 (2024-09-28) =================== ✨ Features ----------- * **github-actions**: Add ``is_prerelease`` output to the version action (`PR#1038`_, `6a5d35d`_) 📖 Documentation ---------------- * **automatic-releases**: Drop extraneous github push configuration (`PR#1011`_, `2135c68`_) * **github-actions**: Add configuration & description of publish action (`PR#1011`_, `2135c68`_) * **github-actions**: Add description of new ``is_prerelease`` output for version action (`PR#1038`_, `6a5d35d`_) * **github-actions**: Clarify & consolidate GitHub Actions usage docs, closes `#907`_ (`PR#1011`_, `2135c68`_) * **github-actions**: Expand descriptions & clarity of actions configs (`PR#1011`_, `2135c68`_) * **github-actions**: Revert removal of namespace prefix from examples (`PR#1011`_, `2135c68`_) * **homepage**: Remove link to old github config & update token scope config (`PR#1011`_, `2135c68`_) .. _#907: https://github.com/python-semantic-release/python-semantic-release/issues/907 .. _2135c68: https://github.com/python-semantic-release/python-semantic-release/commit/2135c68ccbdad94378809902b52fcad546efd5b3 .. _6a5d35d: https://github.com/python-semantic-release/python-semantic-release/commit/6a5d35d0d9124d6a6ee7910711b4154b006b8773 .. _PR#1011: https://github.com/python-semantic-release/python-semantic-release/pull/1011 .. _PR#1038: https://github.com/python-semantic-release/python-semantic-release/pull/1038 .. _changelog-v9.8.9: v9.8.9 (2024-09-27) =================== 🪲 Bug Fixes ------------ * **version-cmd**: Ensure ``version_variables`` do not match partial variable names (`PR#1028`_, `156915c`_) * **version-cmd**: Improve ``version_variables`` flexibility w/ quotes (ie. json, yaml, etc) (`PR#1028`_, `156915c`_) * **version-cmd**: Increase ``version_variable`` flexibility with quotations (ie. 
json, yaml, etc), closes `#601`_, `#706`_, `#962`_, `#1026`_ (`PR#1028`_, `156915c`_) 📖 Documentation ---------------- * Update docstrings to resolve sphinx failures, closes `#1029`_ (`PR#1030`_, `d84efc7`_) * **configuration**: Add clarity to ``version_variables`` usage & limitations (`PR#1028`_, `156915c`_) * **homepage**: Re-structure homepage to be separate from project readme (`PR#1032`_, `2307ed2`_) * **README**: Simplify README to point at official docs (`PR#1032`_, `2307ed2`_) .. _#1026: https://github.com/python-semantic-release/python-semantic-release/issues/1026 .. _#1029: https://github.com/python-semantic-release/python-semantic-release/issues/1029 .. _#601: https://github.com/python-semantic-release/python-semantic-release/issues/601 .. _#706: https://github.com/python-semantic-release/python-semantic-release/issues/706 .. _#962: https://github.com/python-semantic-release/python-semantic-release/issues/962 .. _156915c: https://github.com/python-semantic-release/python-semantic-release/commit/156915c7d759098f65cf9de7c4e980b40b38d5f1 .. _2307ed2: https://github.com/python-semantic-release/python-semantic-release/commit/2307ed29d9990bf1b6821403a4b8db3365ef8bb5 .. _d84efc7: https://github.com/python-semantic-release/python-semantic-release/commit/d84efc7719a8679e6979d513d1c8c60904af7384 .. _PR#1028: https://github.com/python-semantic-release/python-semantic-release/pull/1028 .. _PR#1030: https://github.com/python-semantic-release/python-semantic-release/pull/1030 .. _PR#1032: https://github.com/python-semantic-release/python-semantic-release/pull/1032 .. 
_changelog-v9.8.8: v9.8.8 (2024-09-01) =================== 🪲 Bug Fixes ------------ * **config**: Fix path traversal detection for windows compatibility, closes `#994`_ (`PR#1014`_, `16e6daa`_) 📖 Documentation ---------------- * **configuration**: Update ``build_command`` env table for windows to use all capital vars (`0e8451c`_) * **github-actions**: Update version in examples to latest version (`3c894ea`_) .. _#994: https://github.com/python-semantic-release/python-semantic-release/issues/994 .. _0e8451c: https://github.com/python-semantic-release/python-semantic-release/commit/0e8451cf9003c6a3bdcae6878039d7d9a23d6d5b .. _16e6daa: https://github.com/python-semantic-release/python-semantic-release/commit/16e6daaf851ce1eabf5fbd5aa9fe310a8b0f22b3 .. _3c894ea: https://github.com/python-semantic-release/python-semantic-release/commit/3c894ea8a555d20b454ebf34785e772959bbb4fe .. _PR#1014: https://github.com/python-semantic-release/python-semantic-release/pull/1014 .. _changelog-v9.8.7: v9.8.7 (2024-08-20) =================== 🪲 Bug Fixes ------------ * Provide ``context.history`` global in release notes templates (`PR#1005`_, `5bd91b4`_) * **release-notes**: Fix noop-changelog to print raw release notes (`PR#1005`_, `5bd91b4`_) * **release-notes**: Provide ``context.history`` global in release note templates, closes `#984`_ (`PR#1005`_, `5bd91b4`_) 📖 Documentation ---------------- * Use pinned version for GHA examples (`PR#1004`_, `5fdf761`_) * **changelog**: Clarify description of the default changelog generation process (`399fa65`_) * **configuration**: Clarify ``changelog_file`` vs ``template_dir`` option usage, closes `#983`_ (`a7199c8`_) * **configuration**: Fix build_command_env table rendering (`PR#996`_, `a5eff0b`_) * **github-actions**: Adjust formatting & version warning in code snippets (`PR#1004`_, `5fdf761`_) * **github-actions**: Use pinned version for GHA examples, closes `#1003`_ (`PR#1004`_, `5fdf761`_) .. 
_#1003: https://github.com/python-semantic-release/python-semantic-release/issues/1003 .. _#983: https://github.com/python-semantic-release/python-semantic-release/issues/983 .. _#984: https://github.com/python-semantic-release/python-semantic-release/issues/984 .. _399fa65: https://github.com/python-semantic-release/python-semantic-release/commit/399fa6521d5c6c4397b1d6e9b13ea7945ae92543 .. _5bd91b4: https://github.com/python-semantic-release/python-semantic-release/commit/5bd91b4d7ac33ddf10446f3e66d7d11e0724aeb2 .. _5fdf761: https://github.com/python-semantic-release/python-semantic-release/commit/5fdf7614c036a77ffb051cd30f57d0a63c062c0d .. _a5eff0b: https://github.com/python-semantic-release/python-semantic-release/commit/a5eff0bfe41d2fd5d9ead152a132010b718b7772 .. _a7199c8: https://github.com/python-semantic-release/python-semantic-release/commit/a7199c8cd6041a9de017694302e49b139bbcb034 .. _PR#1004: https://github.com/python-semantic-release/python-semantic-release/pull/1004 .. _PR#1005: https://github.com/python-semantic-release/python-semantic-release/pull/1005 .. _PR#996: https://github.com/python-semantic-release/python-semantic-release/pull/996 .. _changelog-v9.8.6: v9.8.6 (2024-07-20) =================== 🪲 Bug Fixes ------------ * **version-cmd**: Resolve build command execution in powershell (`PR#980`_, `32c8e70`_) 📖 Documentation ---------------- * **configuration**: Correct GHA parameter name for commit email (`PR#981`_, `ce9ffdb`_) .. _32c8e70: https://github.com/python-semantic-release/python-semantic-release/commit/32c8e70915634d8e560b470c3cf38c27cebd7ae0 .. _ce9ffdb: https://github.com/python-semantic-release/python-semantic-release/commit/ce9ffdb82c2358184b288fa18e83a4075f333277 .. _PR#980: https://github.com/python-semantic-release/python-semantic-release/pull/980 .. _PR#981: https://github.com/python-semantic-release/python-semantic-release/pull/981 .. 
_changelog-v9.8.5: v9.8.5 (2024-07-06) =================== 🪲 Bug Fixes ------------ * Enable ``--print-last-released*`` when in detached head or non-release branch (`PR#926`_, `782c0a6`_) * **changelog**: Resolve commit ordering issue when dates are similar (`PR#972`_, `bfda159`_) * **version-cmd**: Drop branch restriction for ``--print-last-released*`` opts, closes `#900`_ (`PR#926`_, `782c0a6`_) ⚡ Performance Improvements --------------------------- * Improve git history processing for changelog generation (`PR#972`_, `bfda159`_) * **changelog**: Improve git history parser changelog generation (`PR#972`_, `bfda159`_) .. _#900: https://github.com/python-semantic-release/python-semantic-release/issues/900 .. _782c0a6: https://github.com/python-semantic-release/python-semantic-release/commit/782c0a6109fb49e168c37f279928c0a4959f8ac6 .. _bfda159: https://github.com/python-semantic-release/python-semantic-release/commit/bfda1593af59e9e728c584dd88d7927fc52c879f .. _PR#926: https://github.com/python-semantic-release/python-semantic-release/pull/926 .. _PR#972: https://github.com/python-semantic-release/python-semantic-release/pull/972 .. _changelog-v9.8.4: v9.8.4 (2024-07-04) =================== 🪲 Bug Fixes ------------ * **changelog-cmd**: Remove usage strings when error occurred, closes `#810`_ (`348a51d`_) * **changelog-cmd**: Render default changelog when user template directory exist but is empty (`bded8de`_) * **config**: Prevent path traversal manipulation of target changelog location (`43e35d0`_) * **config**: Prevent path traversal manipulation of target changelog location (`3eb3dba`_) * **publish-cmd**: Prevent error when provided tag does not exist locally (`16afbbb`_) * **publish-cmd**: Remove usage strings when error occurred, closes `#810`_ (`afbb187`_) * **version-cmd**: Remove usage strings when error occurred, closes `#810`_ (`a7c17c7`_) .. _#810: https://github.com/python-semantic-release/python-semantic-release/issues/810 .. 
_16afbbb: https://github.com/python-semantic-release/python-semantic-release/commit/16afbbb8fbc3a97243e96d7573f4ad2eba09aab9 .. _348a51d: https://github.com/python-semantic-release/python-semantic-release/commit/348a51db8a837d951966aff3789aa0c93d473829 .. _3eb3dba: https://github.com/python-semantic-release/python-semantic-release/commit/3eb3dbafec4223ee463b90e927e551639c69426b .. _43e35d0: https://github.com/python-semantic-release/python-semantic-release/commit/43e35d0972e8a29239d18ed079d1e2013342fcbd .. _a7c17c7: https://github.com/python-semantic-release/python-semantic-release/commit/a7c17c73fd7becb6d0e042e45ff6765605187e2a .. _afbb187: https://github.com/python-semantic-release/python-semantic-release/commit/afbb187d6d405fdf6765082e2a1cecdcd7d357df .. _bded8de: https://github.com/python-semantic-release/python-semantic-release/commit/bded8deae6c92f6dde9774802d9f3716a5cb5705 .. _changelog-v9.8.3: v9.8.3 (2024-06-18) =================== 🪲 Bug Fixes ------------ * **parser**: Strip DOS carriage-returns in commits, closes `#955`_ (`PR#956`_, `0b005df`_) .. _#955: https://github.com/python-semantic-release/python-semantic-release/issues/955 .. _0b005df: https://github.com/python-semantic-release/python-semantic-release/commit/0b005df0a8c7730ee0c71453c9992d7b5d2400a4 .. _PR#956: https://github.com/python-semantic-release/python-semantic-release/pull/956 .. _changelog-v9.8.2: v9.8.2 (2024-06-17) =================== 🪲 Bug Fixes ------------ * **templates**: Suppress extra newlines in default changelog (`PR#954`_, `7b0079b`_) .. _7b0079b: https://github.com/python-semantic-release/python-semantic-release/commit/7b0079bf3e17c0f476bff520b77a571aeac469d0 .. _PR#954: https://github.com/python-semantic-release/python-semantic-release/pull/954 .. 
_changelog-v9.8.1: v9.8.1 (2024-06-05) =================== 🪲 Bug Fixes ------------ * Improve build cmd env on windows (`PR#942`_, `d911fae`_) * **version-cmd**: Pass windows specific env vars to build cmd when on windows (`PR#942`_, `d911fae`_) 📖 Documentation ---------------- * **configuration**: Define windows specific env vars for build cmd (`PR#942`_, `d911fae`_) .. _d911fae: https://github.com/python-semantic-release/python-semantic-release/commit/d911fae993d41a8cb1497fa8b2a7e823576e0f22 .. _PR#942: https://github.com/python-semantic-release/python-semantic-release/pull/942 .. _changelog-v9.8.0: v9.8.0 (2024-05-27) =================== ✨ Features ----------- * Extend gitlab to edit a previous release if exists (`PR#934`_, `23e02b9`_) * **gha**: Configure ssh signed tags in GitHub Action, closes `#936`_ (`PR#937`_, `dfb76b9`_) * **hvcs-gitlab**: Enable gitlab to edit a previous release if found (`PR#934`_, `23e02b9`_) * **version-cmd**: Add toggle of ``--no-verify`` option to ``git commit`` (`PR#927`_, `1de6f78`_) 🪲 Bug Fixes ------------ * **gitlab**: Adjust release name to mirror other hvcs release names (`PR#934`_, `23e02b9`_) * **hvcs-gitlab**: Add tag message to release creation (`PR#934`_, `23e02b9`_) 📖 Documentation ---------------- * **configuration**: Add ``no_git_verify`` description to the configuration page (`PR#927`_, `1de6f78`_) * **migration-v8**: Update version references in migration instructions (`PR#938`_, `d6ba16a`_) .. _#936: https://github.com/python-semantic-release/python-semantic-release/issues/936 .. _1de6f78: https://github.com/python-semantic-release/python-semantic-release/commit/1de6f7834c6d37a74bc53f91609d40793556b52d .. _23e02b9: https://github.com/python-semantic-release/python-semantic-release/commit/23e02b96dfb2a58f6b4ecf7b7812e4c1bc50573d .. _d6ba16a: https://github.com/python-semantic-release/python-semantic-release/commit/d6ba16aa8e01bae1a022a9b06cd0b9162c51c345 .. 
_dfb76b9: https://github.com/python-semantic-release/python-semantic-release/commit/dfb76b94b859a7f3fa3ad778eec7a86de2874d68 .. _PR#927: https://github.com/python-semantic-release/python-semantic-release/pull/927 .. _PR#934: https://github.com/python-semantic-release/python-semantic-release/pull/934 .. _PR#937: https://github.com/python-semantic-release/python-semantic-release/pull/937 .. _PR#938: https://github.com/python-semantic-release/python-semantic-release/pull/938 .. _changelog-v9.7.3: v9.7.3 (2024-05-15) =================== 🪲 Bug Fixes ------------ * Enabled ``prerelease-token`` parameter in github action (`PR#929`_, `1bb26b0`_) .. _1bb26b0: https://github.com/python-semantic-release/python-semantic-release/commit/1bb26b0762d94efd97c06a3f1b6b10fb76901f6d .. _PR#929: https://github.com/python-semantic-release/python-semantic-release/pull/929 .. _changelog-v9.7.2: v9.7.2 (2024-05-13) =================== 🪲 Bug Fixes ------------ * Enable user configuration of ``build_command`` env vars (`PR#925`_, `6b5b271`_) * **version**: Enable user config of ``build_command`` env variables, closes `#922`_ (`PR#925`_, `6b5b271`_) 📖 Documentation ---------------- * **configuration**: Clarify TOC & alphabetize configuration descriptions (`19add16`_) * **configuration**: Clarify TOC & standardize heading links (`3a41995`_) * **configuration**: Document ``build_command_env`` configuration option (`PR#925`_, `6b5b271`_) * **CONTRIBUTING**: Update build command definition for developers (`PR#921`_, `b573c4d`_) .. _#922: https://github.com/python-semantic-release/python-semantic-release/issues/922 .. _19add16: https://github.com/python-semantic-release/python-semantic-release/commit/19add16dcfdfdb812efafe2d492a933d0856df1d .. _3a41995: https://github.com/python-semantic-release/python-semantic-release/commit/3a4199542d0ea4dbf88fa35e11bec41d0c27dd17 .. _6b5b271: https://github.com/python-semantic-release/python-semantic-release/commit/6b5b271453874b982fbf2827ec1f6be6db1c2cc7 .. 
_b573c4d: https://github.com/python-semantic-release/python-semantic-release/commit/b573c4d4a2c212be9bdee918501bb5e046c6a806 .. _PR#921: https://github.com/python-semantic-release/python-semantic-release/pull/921 .. _PR#925: https://github.com/python-semantic-release/python-semantic-release/pull/925 .. _changelog-v9.7.1: v9.7.1 (2024-05-07) =================== 🪲 Bug Fixes ------------ * **gha**: Fix missing ``git_committer_*`` definition in action, closes `#918`_ (`PR#919`_, `ccef9d8`_) .. _#918: https://github.com/python-semantic-release/python-semantic-release/issues/918 .. _ccef9d8: https://github.com/python-semantic-release/python-semantic-release/commit/ccef9d8521be12c0640369b3c3a80b81a7832662 .. _PR#919: https://github.com/python-semantic-release/python-semantic-release/pull/919 .. _changelog-v9.7.0: v9.7.0 (2024-05-06) =================== ✨ Features ----------- * **version-cmd**: Pass ``NEW_VERSION`` & useful env vars to build command (`ee6b246`_) 🪲 Bug Fixes ------------ * **gha**: Add missing ``tag`` option to GitHub Action definition, closes `#906`_ (`PR#908`_, `6b24288`_) * **gha**: Correct use of ``prerelease`` option for GitHub Action (`PR#914`_, `85e27b7`_) 📖 Documentation ---------------- * **configuration**: Add description of build command available env variables (`c882dc6`_) * **gha**: Update GitHub Actions doc with all available options (`PR#914`_, `85e27b7`_) ⚙️ Build System ---------------- * **deps**: Bump GitHub Action container to use ``python3.12``, closes `#801`_ (`PR#914`_, `85e27b7`_) .. _#801: https://github.com/python-semantic-release/python-semantic-release/issues/801 .. _#906: https://github.com/python-semantic-release/python-semantic-release/issues/906 .. _6b24288: https://github.com/python-semantic-release/python-semantic-release/commit/6b24288a96302cd6982260e46fad128ec4940da9 .. _85e27b7: https://github.com/python-semantic-release/python-semantic-release/commit/85e27b7f486e6b0e6cc9e85e101a97e676bc3d60 .. 
_c882dc6: https://github.com/python-semantic-release/python-semantic-release/commit/c882dc62b860b2aeaa925c21d1524f4ae25ef567 .. _ee6b246: https://github.com/python-semantic-release/python-semantic-release/commit/ee6b246df3bb211ab49c8bce075a4c3f6a68ed77 .. _PR#908: https://github.com/python-semantic-release/python-semantic-release/pull/908 .. _PR#914: https://github.com/python-semantic-release/python-semantic-release/pull/914 .. _changelog-v9.6.0: v9.6.0 (2024-04-29) =================== ✨ Features ----------- * Changelog filters are specialized per vcs type (`PR#890`_, `76ed593`_) * **changelog**: Changelog filters are hvcs focused (`PR#890`_, `76ed593`_) * **changelog-context**: Add flag to jinja env for which hvcs is available (`PR#890`_, `76ed593`_) * **changelog-gitea**: Add issue url filter to changelog context (`PR#890`_, `76ed593`_) * **changelog-github**: Add issue url filter to changelog context (`PR#890`_, `76ed593`_) * **version-cmd**: Add ``--as-prerelease`` option to force the next version to be a prerelease, closes `#639`_ (`PR#647`_, `2acb5ac`_) 🪲 Bug Fixes ------------ * Correct version ``--prerelease`` use & enable ``--as-prerelease`` (`PR#647`_, `2acb5ac`_) * **github**: Correct changelog filter for pull request urls (`PR#890`_, `76ed593`_) * **parser-custom**: Gracefully handle custom parser import errors (`67f6038`_) * **version-cmd**: Correct ``--prerelease`` use, closes `#639`_ (`PR#647`_, `2acb5ac`_) 📖 Documentation ---------------- * **changelog-context**: Explain new hvcs specific context filters (`PR#890`_, `76ed593`_) * **commands**: Update version command options definition about prereleases (`PR#647`_, `2acb5ac`_) .. _#639: https://github.com/python-semantic-release/python-semantic-release/issues/639 .. _2acb5ac: https://github.com/python-semantic-release/python-semantic-release/commit/2acb5ac35ae79d7ae25ca9a03fb5c6a4a68b3673 .. 
_67f6038: https://github.com/python-semantic-release/python-semantic-release/commit/67f60389e3f6e93443ea108c0e1b4d30126b8e06 .. _76ed593: https://github.com/python-semantic-release/python-semantic-release/commit/76ed593ea33c851005994f0d1a6a33cc890fb908 .. _PR#647: https://github.com/python-semantic-release/python-semantic-release/pull/647 .. _PR#890: https://github.com/python-semantic-release/python-semantic-release/pull/890 .. _changelog-v9.5.0: v9.5.0 (2024-04-23) =================== ✨ Features ----------- * Extend support to on-prem GitHub Enterprise Server (`PR#896`_, `4fcb737`_) * **github**: Extend support to on-prem GitHub Enterprise Server, closes `#895`_ (`PR#896`_, `4fcb737`_) .. _#895: https://github.com/python-semantic-release/python-semantic-release/issues/895 .. _4fcb737: https://github.com/python-semantic-release/python-semantic-release/commit/4fcb737958d95d1a3be24db7427e137b46f5075f .. _PR#896: https://github.com/python-semantic-release/python-semantic-release/pull/896 .. 
_changelog-v9.4.2: v9.4.2 (2024-04-14) =================== 🪲 Bug Fixes ------------ * **bitbucket**: Allow insecure http connections if configured (`PR#886`_, `db13438`_) * **bitbucket**: Correct url parsing & prevent double url schemes (`PR#676`_, `5cfdb24`_) * **config**: Add flag to allow insecure connections (`PR#886`_, `db13438`_) * **gitea**: Allow insecure http connections if configured (`PR#886`_, `db13438`_) * **gitea**: Correct url parsing & prevent double url schemes (`PR#676`_, `5cfdb24`_) * **github**: Allow insecure http connections if configured (`PR#886`_, `db13438`_) * **github**: Correct url parsing & prevent double url schemes (`PR#676`_, `5cfdb24`_) * **gitlab**: Allow insecure http connections if configured (`PR#886`_, `db13438`_) * **gitlab**: Correct url parsing & prevent double url schemes (`PR#676`_, `5cfdb24`_) * **hvcs**: Allow insecure http connections if configured (`PR#886`_, `db13438`_) * **hvcs**: Prevent double protocol scheme urls in changelogs (`PR#676`_, `5cfdb24`_) * **version-cmd**: Handle HTTP exceptions more gracefully (`PR#886`_, `db13438`_) 📖 Documentation ---------------- * **configuration**: Update ``remote`` settings section with missing values, closes `#868`_ (`PR#886`_, `db13438`_) ⚙️ Build System ---------------- * **deps**: Update rich requirement from ~=12.5 to ~=13.0, closes `#888`_ (`PR#877`_, `4a22a8c`_) .. _#868: https://github.com/python-semantic-release/python-semantic-release/issues/868 .. _#888: https://github.com/python-semantic-release/python-semantic-release/issues/888 .. _4a22a8c: https://github.com/python-semantic-release/python-semantic-release/commit/4a22a8c1a69bcf7b1ddd6db56e6883c617a892b3 .. _5cfdb24: https://github.com/python-semantic-release/python-semantic-release/commit/5cfdb248c003a2d2be5fe65fb61d41b0d4c45db5 .. _db13438: https://github.com/python-semantic-release/python-semantic-release/commit/db1343890f7e0644bc8457f995f2bd62087513d3 .. 
_PR#676: https://github.com/python-semantic-release/python-semantic-release/pull/676 .. _PR#877: https://github.com/python-semantic-release/python-semantic-release/pull/877 .. _PR#886: https://github.com/python-semantic-release/python-semantic-release/pull/886 .. _changelog-v9.4.1: v9.4.1 (2024-04-06) =================== 🪲 Bug Fixes ------------ * **gh-actions-output**: Fixed trailing newline to match GITHUB_OUTPUT format (`PR#885`_, `2c7b6ec`_) * **gh-actions-output**: Fixed trailing newline to match GITHUB_OUTPUT format, closes `#884`_ (`PR#885`_, `2c7b6ec`_) .. _#884: https://github.com/python-semantic-release/python-semantic-release/issues/884 .. _2c7b6ec: https://github.com/python-semantic-release/python-semantic-release/commit/2c7b6ec85b6e3182463d7b695ee48e9669a25b3b .. _PR#885: https://github.com/python-semantic-release/python-semantic-release/pull/885 .. _changelog-v9.4.0: v9.4.0 (2024-03-31) =================== ✨ Features ----------- * **gitea**: Derives gitea api domain from base domain when unspecified (`PR#675`_, `2ee3f8a`_) .. _2ee3f8a: https://github.com/python-semantic-release/python-semantic-release/commit/2ee3f8a918d2e5ea9ab64df88f52e62a1f589c38 .. _PR#675: https://github.com/python-semantic-release/python-semantic-release/pull/675 .. _changelog-v9.3.1: v9.3.1 (2024-03-24) =================== 🪲 Bug Fixes ------------ * **algorithm**: Handle merge-base errors gracefully, closes `#724`_ (`4c998b7`_) * **cli-version**: Change implementation to only push the tag we generated, closes `#803`_ (`8a9da4f`_) ⚡ Performance Improvements --------------------------- * **algorithm**: Simplify logs & use lookup when searching for commit & tag match (`3690b95`_) .. _#724: https://github.com/python-semantic-release/python-semantic-release/issues/724 .. _#803: https://github.com/python-semantic-release/python-semantic-release/issues/803 .. 
_3690b95: https://github.com/python-semantic-release/python-semantic-release/commit/3690b9511de633ab38083de4d2505b6d05853346 .. _4c998b7: https://github.com/python-semantic-release/python-semantic-release/commit/4c998b77a3fe5e12783d1ab2d47789a10b83f247 .. _8a9da4f: https://github.com/python-semantic-release/python-semantic-release/commit/8a9da4feb8753e3ab9ea752afa25decd2047675a .. _changelog-v9.3.0: v9.3.0 (2024-03-21) =================== ✨ Features ----------- * **cmd-version**: Changelog available to bundle (`PR#779`_, `37fdb28`_) * **cmd-version**: Create changelog prior to build enabling doc bundling (`PR#779`_, `37fdb28`_) .. _37fdb28: https://github.com/python-semantic-release/python-semantic-release/commit/37fdb28e0eb886d682b5dea4cc83a7c98a099422 .. _PR#779: https://github.com/python-semantic-release/python-semantic-release/pull/779 .. _changelog-v9.2.2: v9.2.2 (2024-03-19) =================== 🪲 Bug Fixes ------------ * **cli**: Enable subcommand help even if config is invalid, closes `#840`_ (`91d221a`_) .. _#840: https://github.com/python-semantic-release/python-semantic-release/issues/840 .. _91d221a: https://github.com/python-semantic-release/python-semantic-release/commit/91d221a01266e5ca6de5c73296b0a90987847494 .. _changelog-v9.2.1: v9.2.1 (2024-03-19) =================== 🪲 Bug Fixes ------------ * **parse-git-url**: Handle urls with url-safe special characters (`27cd93a`_) .. _27cd93a: https://github.com/python-semantic-release/python-semantic-release/commit/27cd93a0a65ee3787ca51be4c91c48f6ddb4269c .. 
_changelog-v9.2.0: v9.2.0 (2024-03-18) =================== ✨ Features ----------- * **version**: Add new version print flags to display the last released version and tag (`814240c`_) * **version-config**: Add option to disable 0.x.x versions (`dedb3b7`_) 🪲 Bug Fixes ------------ * **changelog**: Make sure default templates render ending in 1 newline (`0b4a45e`_) * **changelog-generation**: Fix incorrect release timezone determination (`f802446`_) 📖 Documentation ---------------- * **configuration**: Add description of ``allow-zero-version`` configuration option (`4028f83`_) * **configuration**: Clarify the ``major_on_zero`` configuration option (`f7753cd`_) ⚙️ Build System ---------------- * **deps**: Add click-option-group for grouping exclusive flags (`bd892b8`_) .. _0b4a45e: https://github.com/python-semantic-release/python-semantic-release/commit/0b4a45e3673d0408016dc8e7b0dce98007a763e3 .. _4028f83: https://github.com/python-semantic-release/python-semantic-release/commit/4028f8384a0181c8d58c81ae81cf0b241a02a710 .. _814240c: https://github.com/python-semantic-release/python-semantic-release/commit/814240c7355df95e9be9a6ed31d004b800584bc0 .. _bd892b8: https://github.com/python-semantic-release/python-semantic-release/commit/bd892b89c26df9fccc9335c84e2b3217e3e02a37 .. _dedb3b7: https://github.com/python-semantic-release/python-semantic-release/commit/dedb3b765c8530379af61d3046c3bb9c160d54e5 .. _f7753cd: https://github.com/python-semantic-release/python-semantic-release/commit/f7753cdabd07e276bc001478d605fca9a4b37ec4 .. _f802446: https://github.com/python-semantic-release/python-semantic-release/commit/f802446bd0693c4c9f6bdfdceae8b89c447827d2 .. _changelog-v9.1.1: v9.1.1 (2024-02-25) =================== 🪲 Bug Fixes ------------ * **parse_git_url**: Fix bad url with dash (`1c25b8e`_) .. _1c25b8e: https://github.com/python-semantic-release/python-semantic-release/commit/1c25b8e6f1e43c15ca7d5a59dca0a13767f9bc33 .. 
_changelog-v9.1.0: v9.1.0 (2024-02-14) =================== ✨ Features ----------- * Add bitbucket hvcs (`bbbbfeb`_) 🪲 Bug Fixes ------------ * Remove unofficial environment variables (`a5168e4`_) 📖 Documentation ---------------- * Add bitbucket authentication (`b78a387`_) * Add bitbucket to token table (`56f146d`_) * Fix typo (`b240e12`_) ⚙️ Build System ---------------- * **deps**: Bump minimum required ``tomlkit`` to ``>=0.11.0``, closes `#834`_ (`291aace`_) .. _#834: https://github.com/python-semantic-release/python-semantic-release/issues/834 .. _291aace: https://github.com/python-semantic-release/python-semantic-release/commit/291aacea1d0429a3b27e92b0a20b598f43f6ea6b .. _56f146d: https://github.com/python-semantic-release/python-semantic-release/commit/56f146d9f4c0fc7f2a84ad11b21c8c45e9221782 .. _a5168e4: https://github.com/python-semantic-release/python-semantic-release/commit/a5168e40b9a14dbd022f62964f382b39faf1e0df .. _b240e12: https://github.com/python-semantic-release/python-semantic-release/commit/b240e129b180d45c1d63d464283b7dfbcb641d0c .. _b78a387: https://github.com/python-semantic-release/python-semantic-release/commit/b78a387d8eccbc1a6a424a183254fc576126199c .. _bbbbfeb: https://github.com/python-semantic-release/python-semantic-release/commit/bbbbfebff33dd24b8aed2d894de958d532eac596 .. _changelog-v9.0.3: v9.0.3 (2024-02-08) =================== 🪲 Bug Fixes ------------ * **algorithm**: Correct bfs to not abort on previously visited node (`02df305`_) ⚡ Performance Improvements --------------------------- * **algorithm**: Refactor bfs search to use queue rather than recursion (`8b742d3`_) .. _02df305: https://github.com/python-semantic-release/python-semantic-release/commit/02df305db43abfc3a1f160a4a52cc2afae5d854f .. _8b742d3: https://github.com/python-semantic-release/python-semantic-release/commit/8b742d3db6652981a7b5f773a74b0534edc1fc15 .. 
_changelog-v9.0.2: v9.0.2 (2024-02-08) =================== 🪲 Bug Fixes ------------ * **util**: Properly parse windows line-endings in commit messages, closes `#820`_ (`70193ba`_) 📖 Documentation ---------------- * Remove duplicate note in configuration.rst (`PR#807`_, `fb6f243`_) .. _#820: https://github.com/python-semantic-release/python-semantic-release/issues/820 .. _70193ba: https://github.com/python-semantic-release/python-semantic-release/commit/70193ba117c1a6d3690aed685fee8a734ba174e5 .. _fb6f243: https://github.com/python-semantic-release/python-semantic-release/commit/fb6f243a141642c02469f1080180ecaf4f3cec66 .. _PR#807: https://github.com/python-semantic-release/python-semantic-release/pull/807 .. _changelog-v9.0.1: v9.0.1 (2024-02-06) =================== 🪲 Bug Fixes ------------ * **config**: Set commit parser opt defaults based on parser choice (`PR#782`_, `9c594fb`_) .. _9c594fb: https://github.com/python-semantic-release/python-semantic-release/commit/9c594fb6efac7e4df2b0bfbd749777d3126d03d7 .. _PR#782: https://github.com/python-semantic-release/python-semantic-release/pull/782 .. _changelog-v9.0.0: v9.0.0 (2024-02-06) =================== ♻️ Refactoring --------------- * Drop support for Python 3.7 (`PR#828`_, `ad086f5`_) 💥 BREAKING CHANGES -------------------- * Removed Python 3.7 specific control flows and made more modern implementations the default control flow without a bypass or workaround. Will break on Python 3.7 now. If you require Python 3.7, you should lock your major version at v8. Since we only have enough manpower to maintain the latest major release, unfortunately there will not be any more updates to v8. * We decided to remove support for Python 3.7 because it has been officially deprecated by the Python Foundation over a year ago and our codebase is starting to have limitations and custom implementations just to maintain support for 3.7. .. 
_ad086f5: https://github.com/python-semantic-release/python-semantic-release/commit/ad086f5993ae4741d6e20fee618d1bce8df394fb .. _PR#828: https://github.com/python-semantic-release/python-semantic-release/pull/828 .. _changelog-v8.7.2: v8.7.2 (2024-01-03) =================== 🪲 Bug Fixes ------------ * **lint**: Correct linter errors (`c9556b0`_) .. _c9556b0: https://github.com/python-semantic-release/python-semantic-release/commit/c9556b0ca6df6a61e9ce909d18bc5be8b6154bf8 .. _changelog-v8.7.1: v8.7.1 (2024-01-03) =================== 🪲 Bug Fixes ------------ * **cli-generate-config**: Ensure configuration types are always toml parsable (`PR#785`_, `758e649`_) 📖 Documentation ---------------- * Add note on default envvar behavior (`PR#780`_, `0b07cae`_) * **configuration**: Change defaults definition of token default to table (`PR#786`_, `df1df0d`_) * **contributing**: Add docs-build, testing conf, & build instructions (`PR#787`_, `011b072`_) .. _011b072: https://github.com/python-semantic-release/python-semantic-release/commit/011b0729cba3045b4e7291fd970cb17aad7bae60 .. _0b07cae: https://github.com/python-semantic-release/python-semantic-release/commit/0b07cae71915c5c82d7784898b44359249542a64 .. _758e649: https://github.com/python-semantic-release/python-semantic-release/commit/758e64975fe46b961809f35977574729b7c44271 .. _df1df0d: https://github.com/python-semantic-release/python-semantic-release/commit/df1df0de8bc655cbf8f86ae52aff10efdc66e6d2 .. _PR#780: https://github.com/python-semantic-release/python-semantic-release/pull/780 .. _PR#785: https://github.com/python-semantic-release/python-semantic-release/pull/785 .. _PR#786: https://github.com/python-semantic-release/python-semantic-release/pull/786 .. _PR#787: https://github.com/python-semantic-release/python-semantic-release/pull/787 .. _changelog-v8.7.0: v8.7.0 (2023-12-22) =================== ✨ Features ----------- * **config**: Enable default environment token per hvcs (`PR#774`_, `26528eb`_) .. 
_26528eb: https://github.com/python-semantic-release/python-semantic-release/commit/26528eb8794d00dfe985812269702fbc4c4ec788 .. _PR#774: https://github.com/python-semantic-release/python-semantic-release/pull/774 .. _changelog-v8.6.0: v8.6.0 (2023-12-22) =================== ✨ Features ----------- * **utils**: Expand parsable valid git remote url formats (`PR#771`_, `cf75f23`_) 📖 Documentation ---------------- * Minor correction to commit-parsing documentation (`PR#777`_, `245e878`_) .. _245e878: https://github.com/python-semantic-release/python-semantic-release/commit/245e878f02d5cafec6baf0493c921c1e396b56e8 .. _cf75f23: https://github.com/python-semantic-release/python-semantic-release/commit/cf75f237360488ebb0088e5b8aae626e97d9cbdd .. _PR#771: https://github.com/python-semantic-release/python-semantic-release/pull/771 .. _PR#777: https://github.com/python-semantic-release/python-semantic-release/pull/777 .. _changelog-v8.5.2: v8.5.2 (2023-12-19) =================== 🪲 Bug Fixes ------------ * **cli**: Gracefully output configuration validation errors (`PR#772`_, `e8c9d51`_) .. _e8c9d51: https://github.com/python-semantic-release/python-semantic-release/commit/e8c9d516c37466a5dce75a73766d5be0f9e74627 .. _PR#772: https://github.com/python-semantic-release/python-semantic-release/pull/772 .. _changelog-v8.5.1: v8.5.1 (2023-12-12) =================== 🪲 Bug Fixes ------------ * **cmd-version**: Handle committing of git-ignored file gracefully (`PR#764`_, `ea89fa7`_) * **config**: Cleanly handle repository in detached HEAD state (`PR#765`_, `ac4f9aa`_) * **config**: Gracefully fail when repo is in a detached HEAD state (`PR#765`_, `ac4f9aa`_) * **version**: Only commit non git-ignored files during version commit (`PR#764`_, `ea89fa7`_) 📖 Documentation ---------------- * **configuration**: Adjust wording and improve clarity (`PR#766`_, `6b2fc8c`_) * **configuration**: Fix typo in text (`PR#766`_, `6b2fc8c`_) .. 
_6b2fc8c: https://github.com/python-semantic-release/python-semantic-release/commit/6b2fc8c156e122ee1b43fdb513b2dc3b8fd76724 .. _ac4f9aa: https://github.com/python-semantic-release/python-semantic-release/commit/ac4f9aacb72c99f2479ae33369822faad011a824 .. _ea89fa7: https://github.com/python-semantic-release/python-semantic-release/commit/ea89fa72885e15da91687172355426a22c152513 .. _PR#764: https://github.com/python-semantic-release/python-semantic-release/pull/764 .. _PR#765: https://github.com/python-semantic-release/python-semantic-release/pull/765 .. _PR#766: https://github.com/python-semantic-release/python-semantic-release/pull/766 .. _changelog-v8.5.0: v8.5.0 (2023-12-07) =================== ✨ Features ----------- * Allow template directories to contain a '.' at the top-level (`PR#762`_, `07b232a`_) .. _07b232a: https://github.com/python-semantic-release/python-semantic-release/commit/07b232a3b34be0b28c6af08aea4852acb1b9bd56 .. _PR#762: https://github.com/python-semantic-release/python-semantic-release/pull/762 .. _changelog-v8.4.0: v8.4.0 (2023-12-07) =================== ✨ Features ----------- * **cmd-version**: Add ``--tag/--no-tag`` option to version command (`PR#752`_, `de6b9ad`_) * **version**: Add ``--no-tag`` option to turn off tag creation (`PR#752`_, `de6b9ad`_) 🪲 Bug Fixes ------------ * **version**: Separate push tags from commit push when not committing changes (`PR#752`_, `de6b9ad`_) 📖 Documentation ---------------- * **commands**: Update ``version`` subcommand options (`PR#752`_, `de6b9ad`_) * **migration**: Fix comments about publish command (`PR#747`_, `90380d7`_) .. _90380d7: https://github.com/python-semantic-release/python-semantic-release/commit/90380d797a734dcca5040afc5fa00e3e01f64152 .. _de6b9ad: https://github.com/python-semantic-release/python-semantic-release/commit/de6b9ad921e697b5ea2bb2ea8f180893cecca920 .. _PR#747: https://github.com/python-semantic-release/python-semantic-release/pull/747 .. 
_PR#752: https://github.com/python-semantic-release/python-semantic-release/pull/752 .. _changelog-v8.3.0: v8.3.0 (2023-10-23) =================== ✨ Features ----------- * **action**: Use composite action for semantic release (`PR#692`_, `4648d87`_) .. _4648d87: https://github.com/python-semantic-release/python-semantic-release/commit/4648d87bac8fb7e6cc361b765b4391b30a8caef8 .. _PR#692: https://github.com/python-semantic-release/python-semantic-release/pull/692 .. _changelog-v8.2.0: v8.2.0 (2023-10-23) =================== ✨ Features ----------- * Allow user customization of release notes template (`PR#736`_, `94a1311`_) 📖 Documentation ---------------- * Add PYTHONPATH mention for commit parser (`3284258`_) .. _3284258: https://github.com/python-semantic-release/python-semantic-release/commit/3284258b9fa1a3fe165f336181aff831d50fddd3 .. _94a1311: https://github.com/python-semantic-release/python-semantic-release/commit/94a131167e1b867f8bc112a042b9766e050ccfd1 .. _PR#736: https://github.com/python-semantic-release/python-semantic-release/pull/736 .. _changelog-v8.1.2: v8.1.2 (2023-10-13) =================== 🪲 Bug Fixes ------------ * Correct lint errors (`a13a6c3`_) * Error when running build command on windows systems (`PR#732`_, `2553657`_) .. _2553657: https://github.com/python-semantic-release/python-semantic-release/commit/25536574760b407410f435441da533fafbf94402 .. _a13a6c3: https://github.com/python-semantic-release/python-semantic-release/commit/a13a6c37e180dc422599939a5725835306c18ff2 .. _PR#732: https://github.com/python-semantic-release/python-semantic-release/pull/732 .. _changelog-v8.1.1: v8.1.1 (2023-09-19) =================== 🪲 Bug Fixes ------------ * Attribute error when logging non-strings (`PR#711`_, `75e6e48`_) .. _75e6e48: https://github.com/python-semantic-release/python-semantic-release/commit/75e6e48129da8238a62d5eccac1ae55d0fee0f9f .. _PR#711: https://github.com/python-semantic-release/python-semantic-release/pull/711 .. 
_changelog-v8.1.0: v8.1.0 (2023-09-19) =================== ✨ Features ----------- * Upgrade pydantic to v2 (`PR#714`_, `5a5c5d0`_) 📖 Documentation ---------------- * Fix typos (`PR#708`_, `2698b0e`_) * Update project urls (`PR#715`_, `5fd5485`_) .. _2698b0e: https://github.com/python-semantic-release/python-semantic-release/commit/2698b0e006ff7e175430b98450ba248ed523b341 .. _5a5c5d0: https://github.com/python-semantic-release/python-semantic-release/commit/5a5c5d0ee347750d7c417c3242d52e8ada50b217 .. _5fd5485: https://github.com/python-semantic-release/python-semantic-release/commit/5fd54856dfb6774feffc40d36d5bb0f421f04842 .. _PR#708: https://github.com/python-semantic-release/python-semantic-release/pull/708 .. _PR#714: https://github.com/python-semantic-release/python-semantic-release/pull/714 .. _PR#715: https://github.com/python-semantic-release/python-semantic-release/pull/715 .. _changelog-v8.0.8: v8.0.8 (2023-08-26) =================== 🪲 Bug Fixes ------------ * Dynamic_import() import path split (`PR#686`_, `1007a06`_) .. _1007a06: https://github.com/python-semantic-release/python-semantic-release/commit/1007a06d1e16beef6d18f44ff2e0e09921854b54 .. _PR#686: https://github.com/python-semantic-release/python-semantic-release/pull/686 .. _changelog-v8.0.7: v8.0.7 (2023-08-16) =================== 🪲 Bug Fixes ------------ * Use correct upload url for github (`PR#661`_, `8a515ca`_) .. _8a515ca: https://github.com/python-semantic-release/python-semantic-release/commit/8a515caf1f993aa653e024beda2fdb9e629cc42a .. _PR#661: https://github.com/python-semantic-release/python-semantic-release/pull/661 .. _changelog-v8.0.6: v8.0.6 (2023-08-13) =================== 🪲 Bug Fixes ------------ * **publish**: Improve error message when no tags found (`PR#683`_, `bdc06ea`_) .. _bdc06ea: https://github.com/python-semantic-release/python-semantic-release/commit/bdc06ea061c19134d5d74bd9f168700dd5d9bcf5 .. 
_PR#683: https://github.com/python-semantic-release/python-semantic-release/pull/683 .. _changelog-v8.0.5: v8.0.5 (2023-08-10) =================== 🪲 Bug Fixes ------------ * Don't warn about vcs token if ignore_token_for_push is true. (`PR#670`_, `f1a54a6`_) 📖 Documentation ---------------- * ``password`` should be ``token``. (`PR#670`_, `f1a54a6`_) * Fix typo missing 's' in version_variable[s] in configuration.rst (`PR#668`_, `879186a`_) .. _879186a: https://github.com/python-semantic-release/python-semantic-release/commit/879186aa09a3bea8bbe2b472f892cf7c0712e557 .. _f1a54a6: https://github.com/python-semantic-release/python-semantic-release/commit/f1a54a6c9a05b225b6474d50cd610eca19ec0c34 .. _PR#668: https://github.com/python-semantic-release/python-semantic-release/pull/668 .. _PR#670: https://github.com/python-semantic-release/python-semantic-release/pull/670 .. _changelog-v8.0.4: v8.0.4 (2023-07-26) =================== 🪲 Bug Fixes ------------ * **changelog**: Use version as semver tag by default (`PR#653`_, `5984c77`_) 📖 Documentation ---------------- * Add Python 3.11 to classifiers in metadata (`PR#651`_, `5a32a24`_) * Clarify usage of assets config option (`PR#655`_, `efa2b30`_) .. _5984c77: https://github.com/python-semantic-release/python-semantic-release/commit/5984c7771edc37f0d7d57894adecc2591efc414d .. _5a32a24: https://github.com/python-semantic-release/python-semantic-release/commit/5a32a24bf4128c39903f0c5d3bd0cb1ccba57e18 .. _efa2b30: https://github.com/python-semantic-release/python-semantic-release/commit/efa2b3019b41eb427f0e1c8faa21ad10664295d0 .. _PR#651: https://github.com/python-semantic-release/python-semantic-release/pull/651 .. _PR#653: https://github.com/python-semantic-release/python-semantic-release/pull/653 .. _PR#655: https://github.com/python-semantic-release/python-semantic-release/pull/655 .. 
_changelog-v8.0.3: v8.0.3 (2023-07-21) =================== 🪲 Bug Fixes ------------ * Skip non-parsable versions when calculating next version (`PR#649`_, `88f25ea`_) .. _88f25ea: https://github.com/python-semantic-release/python-semantic-release/commit/88f25eae62589cdf53dbc3dfcb167a3ae6cba2d3 .. _PR#649: https://github.com/python-semantic-release/python-semantic-release/pull/649 .. _changelog-v8.0.2: v8.0.2 (2023-07-18) =================== 🪲 Bug Fixes ------------ * Handle missing configuration (`PR#644`_, `f15753c`_) 📖 Documentation ---------------- * Better description for tag_format usage (`2129b72`_) * Clarify v8 breaking changes in GitHub action inputs (`PR#643`_, `cda050c`_) * Correct version_toml example in migrating_from_v7.rst (`PR#641`_, `325d5e0`_) .. _2129b72: https://github.com/python-semantic-release/python-semantic-release/commit/2129b729837eccc41a33dbb49785a8a30ce6b187 .. _325d5e0: https://github.com/python-semantic-release/python-semantic-release/commit/325d5e048bd89cb2a94c47029d4878b27311c0f0 .. _cda050c: https://github.com/python-semantic-release/python-semantic-release/commit/cda050cd9e789d81458157ee240ff99ec65c6f25 .. _f15753c: https://github.com/python-semantic-release/python-semantic-release/commit/f15753ce652f36cc03b108c667a26ab74bcbf95d .. _PR#641: https://github.com/python-semantic-release/python-semantic-release/pull/641 .. _PR#643: https://github.com/python-semantic-release/python-semantic-release/pull/643 .. _PR#644: https://github.com/python-semantic-release/python-semantic-release/pull/644 .. _changelog-v8.0.1: v8.0.1 (2023-07-17) =================== 🪲 Bug Fixes ------------ * Invalid version in Git history should not cause a release failure (`PR#632`_, `254430b`_) 📖 Documentation ---------------- * Reduce readthedocs formats and add entries to migration from v7 guide (`9b6ddfe`_) * **migration**: Fix hyperlink (`PR#631`_, `5fbd52d`_) .. 
_254430b: https://github.com/python-semantic-release/python-semantic-release/commit/254430b5cc5f032016b4c73168f0403c4d87541e .. _5fbd52d: https://github.com/python-semantic-release/python-semantic-release/commit/5fbd52d7de4982b5689651201a0e07b445158645 .. _9b6ddfe: https://github.com/python-semantic-release/python-semantic-release/commit/9b6ddfef448f9de30fa2845034f76655d34a9912 .. _PR#631: https://github.com/python-semantic-release/python-semantic-release/pull/631 .. _PR#632: https://github.com/python-semantic-release/python-semantic-release/pull/632 .. _changelog-v8.0.0: v8.0.0 (2023-07-16) =================== ✨ Features ----------- * **publish-cmd**: Add ``--post-to-release-tag`` option to control where to publish (`PR#619`_, `ec30564`_) * Make it easier to access commit messages in ParsedCommits (`PR#619`_, `ec30564`_) * Remove publication of ``dists/`` to artifact repository (`PR#619`_, `ec30564`_) * Rename 'upload' configuration section to 'publish' (`PR#619`_, `ec30564`_) * **github-action**: Add GitHub Actions output variables (`PR#619`_, `ec30564`_) * **version-cmd**: Add ``--skip-build`` option (`PR#619`_, `ec30564`_) * **version-cmd**: Add ``--strict`` version mode (`PR#619`_, `ec30564`_) 🪲 Bug Fixes ------------ * Add logging for token auth, use token for push (`PR#619`_, `ec30564`_) * Caching for repo owner and name (`PR#619`_, `ec30564`_) * Correct assets type in configuration (`PR#619`_, `ec30564`_) * Correct assets type-annotation for RuntimeContext (`PR#619`_, `ec30564`_) * Correct Dockerfile CLI command and GHA fetch (`PR#619`_, `ec30564`_) * Correct handling of build commands (`PR#619`_, `ec30564`_) * Correct logic for generating release notes (`PR#619`_, `ec30564`_) * Create_or_update_release for Gitlab hvcs (`PR#619`_, `ec30564`_) * Make additional attributes available for template authors (`PR#619`_, `ec30564`_) * Only call Github Action output callback once defaults are set (`PR#619`_, `ec30564`_) * Remove commit amending behavior (`PR#619`_, 
`ec30564`_) * Resolve branch checkout logic in GHA (`PR#619`_, `ec30564`_) * Resolve bug in changelog logic, enable upload to pypi (`PR#619`_, `ec30564`_) * Resolve loss of tag_format configuration (`PR#619`_, `ec30564`_) * **github-action**: Pin Debian version in Dockerfile (`PR#619`_, `ec30564`_) * **github-action**: Correct input parsing (`PR#619`_, `ec30564`_) * **github-action**: Mark container fs as safe for git to operate on (`PR#619`_, `ec30564`_) * **github-action**: Quotation for git config command (`PR#619`_, `ec30564`_) * **github-action**: Remove default for 'force' (`PR#619`_, `ec30564`_) 📖 Documentation ---------------- * Convert to Furo theme (`PR#619`_, `ec30564`_) * Fix typo (`PR#619`_, `ec30564`_) * Remove reference to dist publication (`PR#619`_, `ec30564`_) * Update docs with additional required permissions (`PR#619`_, `ec30564`_) * **changelog-templates**: fix typo (`PR#619`_, `ec30564`_) ♻️ Refactoring --------------- * Remove verify-ci command (`PR#619`_, `ec30564`_) 💥 BREAKING CHANGES -------------------- * numerous breaking changes, see :ref:`upgrade_v8` for more information .. _ec30564: https://github.com/python-semantic-release/python-semantic-release/commit/ec30564b4ec732c001d76d3c09ba033066d2b6fe .. _PR#619: https://github.com/python-semantic-release/python-semantic-release/pull/619 .. _changelog-v7.34.6: v7.34.6 (2023-06-17) ==================== 🪲 Bug Fixes ------------ * Relax invoke dependency constraint (`18ea200`_) .. _18ea200: https://github.com/python-semantic-release/python-semantic-release/commit/18ea200633fd67e07f3d4121df5aa4c6dd29d154 .. _changelog-v7.34.5: v7.34.5 (2023-06-17) ==================== 🪲 Bug Fixes ------------ * Consider empty commits (`PR#608`_, `6f2e890`_) .. _6f2e890: https://github.com/python-semantic-release/python-semantic-release/commit/6f2e8909636595d3cb5e858f42c63820cda45974 .. _PR#608: https://github.com/python-semantic-release/python-semantic-release/pull/608 .. 
_changelog-v7.34.4: v7.34.4 (2023-06-15) ==================== 🪲 Bug Fixes ------------ * Docker build fails installing git (`PR#605`_, `9e3eb97`_) .. _9e3eb97: https://github.com/python-semantic-release/python-semantic-release/commit/9e3eb979783bc39ca564c2967c6c77eecba682e6 .. _PR#605: https://github.com/python-semantic-release/python-semantic-release/pull/605 .. _changelog-v7.34.3: v7.34.3 (2023-06-01) ==================== 🪲 Bug Fixes ------------ * Generate markdown linter compliant changelog headers & lists (`PR#597`_, `cc87400`_) .. _cc87400: https://github.com/python-semantic-release/python-semantic-release/commit/cc87400d4a823350de7d02dc3172d2488c9517db .. _PR#597: https://github.com/python-semantic-release/python-semantic-release/pull/597 .. _changelog-v7.34.2: v7.34.2 (2023-05-29) ==================== 🪲 Bug Fixes ------------ * Open all files with explicit utf-8 encoding (`PR#596`_, `cb71f35`_) .. _cb71f35: https://github.com/python-semantic-release/python-semantic-release/commit/cb71f35c26c1655e675fa735fa880d39a2c8af9c .. _PR#596: https://github.com/python-semantic-release/python-semantic-release/pull/596 .. _changelog-v7.34.1: v7.34.1 (2023-05-28) ==================== 🪲 Bug Fixes ------------ * Generate markdown linter compliant changelog headers & lists (`PR#594`_, `9d9d403`_) .. _9d9d403: https://github.com/python-semantic-release/python-semantic-release/commit/9d9d40305c499c907335abe313e3ed122db0b154 .. _PR#594: https://github.com/python-semantic-release/python-semantic-release/pull/594 .. _changelog-v7.34.0: v7.34.0 (2023-05-28) ==================== ✨ Features ----------- * Add option to only parse commits for current working directory (`PR#509`_, `cdf8116`_) .. _cdf8116: https://github.com/python-semantic-release/python-semantic-release/commit/cdf8116c1e415363b10a01f541873e04ad874220 .. _PR#509: https://github.com/python-semantic-release/python-semantic-release/pull/509 .. 
_changelog-v7.33.5: v7.33.5 (2023-05-19) ==================== 🪲 Bug Fixes ------------ * Update docs and default config for gitmoji changes (`PR#590`_, `192da6e`_) * Update sphinx dep (`PR#590`_, `192da6e`_) 📖 Documentation ---------------- * Update broken badge and add links (`PR#591`_, `0c23447`_) .. _0c23447: https://github.com/python-semantic-release/python-semantic-release/commit/0c234475d27ad887b19170c82deb80293b3a95f1 .. _192da6e: https://github.com/python-semantic-release/python-semantic-release/commit/192da6e1352298b48630423d50191070a1c5ab24 .. _PR#590: https://github.com/python-semantic-release/python-semantic-release/pull/590 .. _PR#591: https://github.com/python-semantic-release/python-semantic-release/pull/591 .. _changelog-v7.33.4: v7.33.4 (2023-05-14) ==================== 🪲 Bug Fixes ------------ * If prerelease, publish prerelease (`PR#587`_, `927da9f`_) .. _927da9f: https://github.com/python-semantic-release/python-semantic-release/commit/927da9f8feb881e02bc08b33dc559bd8e7fc41ab .. _PR#587: https://github.com/python-semantic-release/python-semantic-release/pull/587 .. _changelog-v7.33.3: v7.33.3 (2023-04-24) ==================== 🪲 Bug Fixes ------------ * Trim emojis from config (`PR#583`_, `02902f7`_) * Update Gitmojis according to official node module (`PR#582`_, `806fcfa`_) 📖 Documentation ---------------- * Grammar in ``docs/troubleshooting.rst`` (`PR#557`_, `bbe754a`_) * Spelling and grammar in ``travis.rst`` (`PR#556`_, `3a76e9d`_) * Update repository name (`PR#559`_, `5cdb05e`_) .. _02902f7: https://github.com/python-semantic-release/python-semantic-release/commit/02902f73ee961565c2470c000f00947d9ef06cb1 .. _3a76e9d: https://github.com/python-semantic-release/python-semantic-release/commit/3a76e9d7505c421009eb3e953c32cccac2e70e07 .. _5cdb05e: https://github.com/python-semantic-release/python-semantic-release/commit/5cdb05e20f17b12890e1487c42d317dcbadd06c8 .. 
_806fcfa: https://github.com/python-semantic-release/python-semantic-release/commit/806fcfa4cfdd3df4b380afd015a68dc90d54215a .. _bbe754a: https://github.com/python-semantic-release/python-semantic-release/commit/bbe754a3db9ce7132749e7902fe118b52f48ee42 .. _PR#556: https://github.com/python-semantic-release/python-semantic-release/pull/556 .. _PR#557: https://github.com/python-semantic-release/python-semantic-release/pull/557 .. _PR#559: https://github.com/python-semantic-release/python-semantic-release/pull/559 .. _PR#582: https://github.com/python-semantic-release/python-semantic-release/pull/582 .. _PR#583: https://github.com/python-semantic-release/python-semantic-release/pull/583 .. _changelog-v7.33.2: v7.33.2 (2023-02-17) ==================== 🪲 Bug Fixes ------------ * Inconsistent versioning between print-version and publish (`PR#524`_, `17d60e9`_) .. _17d60e9: https://github.com/python-semantic-release/python-semantic-release/commit/17d60e9bf66f62e5845065486c9d5e450f74839a .. _PR#524: https://github.com/python-semantic-release/python-semantic-release/pull/524 .. _changelog-v7.33.1: v7.33.1 (2023-02-01) ==================== 🪲 Bug Fixes ------------ * **action**: Mark container fs as safe for git (`PR#552`_, `2a55f68`_) .. _2a55f68: https://github.com/python-semantic-release/python-semantic-release/commit/2a55f68e2b3cb9ffa9204c00ddbf12706af5c070 .. _PR#552: https://github.com/python-semantic-release/python-semantic-release/pull/552 .. 
_changelog-v7.33.0: v7.33.0 (2023-01-15) ==================== ✨ Features ----------- * Add signing options to action (`31ad5eb`_) * Update action with configuration options (`PR#518`_, `4664afe`_) * **repository**: Add support for TWINE_CERT, closes `#521`_ (`PR#522`_, `d56e85d`_) 🪲 Bug Fixes ------------ * Changelog release commit search logic (`PR#530`_, `efb3410`_) * **github-actions**: Bump Dockerfile to use Python 3.10 image, closes `#533`_ (`PR#536`_, `8f2185d`_) * **action**: Fix environment variable names (`3c66218`_) 📖 Documentation ---------------- * Update documentation (`5cbdad2`_) .. _#521: https://github.com/python-semantic-release/python-semantic-release/issues/521 .. _#533: https://github.com/python-semantic-release/python-semantic-release/issues/533 .. _31ad5eb: https://github.com/python-semantic-release/python-semantic-release/commit/31ad5eb5a25f0ea703afc295351104aefd66cac1 .. _3c66218: https://github.com/python-semantic-release/python-semantic-release/commit/3c66218640044adf263fcf9b2714cfc4b99c2e90 .. _4664afe: https://github.com/python-semantic-release/python-semantic-release/commit/4664afe5f80a04834e398fefb841b166a51d95b7 .. _5cbdad2: https://github.com/python-semantic-release/python-semantic-release/commit/5cbdad296034a792c9bf05e3700eac4f847eb469 .. _8f2185d: https://github.com/python-semantic-release/python-semantic-release/commit/8f2185d570b3966b667ac591ae523812e9d2e00f .. _d56e85d: https://github.com/python-semantic-release/python-semantic-release/commit/d56e85d1f2ac66fb0b59af2178164ca915dbe163 .. _efb3410: https://github.com/python-semantic-release/python-semantic-release/commit/efb341036196c39b4694ca4bfa56c6b3e0827c6c .. _PR#518: https://github.com/python-semantic-release/python-semantic-release/pull/518 .. _PR#522: https://github.com/python-semantic-release/python-semantic-release/pull/522 .. _PR#530: https://github.com/python-semantic-release/python-semantic-release/pull/530 .. 
_PR#536: https://github.com/python-semantic-release/python-semantic-release/pull/536 .. _PR#541: https://github.com/python-semantic-release/python-semantic-release/pull/541 .. _changelog-v7.32.2: v7.32.2 (2022-10-22) ==================== 🪲 Bug Fixes ------------ * Fix changelog generation in tag-mode (`PR#171`_, `482a62e`_) 📖 Documentation ---------------- * Fix code blocks (`PR#506`_, `24b7673`_) .. _24b7673: https://github.com/python-semantic-release/python-semantic-release/commit/24b767339fcef1c843f7dd3188900adab05e03b1 .. _482a62e: https://github.com/python-semantic-release/python-semantic-release/commit/482a62ec374208b2d57675cb0b7f0ab9695849b9 .. _PR#171: https://github.com/python-semantic-release/python-semantic-release/pull/171 .. _PR#506: https://github.com/python-semantic-release/python-semantic-release/pull/506 .. _changelog-v7.32.1: v7.32.1 (2022-10-07) ==================== 🪲 Bug Fixes ------------ * Corrections for deprecation warnings (`PR#505`_, `d47afb6`_) 📖 Documentation ---------------- * Correct spelling mistakes (`PR#504`_, `3717e0d`_) .. _3717e0d: https://github.com/python-semantic-release/python-semantic-release/commit/3717e0d8810f5d683847c7b0e335eeefebbf2921 .. _d47afb6: https://github.com/python-semantic-release/python-semantic-release/commit/d47afb6516238939e174f946977bf4880062a622 .. _PR#504: https://github.com/python-semantic-release/python-semantic-release/pull/504 .. _PR#505: https://github.com/python-semantic-release/python-semantic-release/pull/505 .. _changelog-v7.32.0: v7.32.0 (2022-09-25) ==================== ✨ Features ----------- * Add setting for enforcing textual changelog sections, closes `#498`_ (`PR#502`_, `988437d`_) 📖 Documentation ---------------- * Correct documented default behavior for ``commit_version_number`` (`PR#497`_, `ffae2dc`_) .. _#498: https://github.com/python-semantic-release/python-semantic-release/issues/498 .. 
_988437d: https://github.com/python-semantic-release/python-semantic-release/commit/988437d21e40d3e3b1c95ed66b535bdd523210de .. _ffae2dc: https://github.com/python-semantic-release/python-semantic-release/commit/ffae2dc68f7f4bc13c5fd015acd43b457e568ada .. _PR#497: https://github.com/python-semantic-release/python-semantic-release/pull/497 .. _PR#502: https://github.com/python-semantic-release/python-semantic-release/pull/502 .. _changelog-v7.31.4: v7.31.4 (2022-08-23) ==================== 🪲 Bug Fixes ------------ * Account for trailing newlines in commit messages, closes `#490`_ (`PR#495`_, `111b151`_) .. _#490: https://github.com/python-semantic-release/python-semantic-release/issues/490 .. _111b151: https://github.com/python-semantic-release/python-semantic-release/commit/111b1518e8c8e2bd7535bd4c4b126548da384605 .. _PR#495: https://github.com/python-semantic-release/python-semantic-release/pull/495 .. _changelog-v7.31.3: v7.31.3 (2022-08-22) ==================== 🪲 Bug Fixes ------------ * Use ``commit_subject`` when searching for release commits (`PR#488`_, `3849ed9`_) .. _3849ed9: https://github.com/python-semantic-release/python-semantic-release/commit/3849ed992c3cff9054b8690bcf59e49768f84f47 .. _PR#488: https://github.com/python-semantic-release/python-semantic-release/pull/488 .. _changelog-v7.31.2: v7.31.2 (2022-07-29) ==================== 🪲 Bug Fixes ------------ * Add better handling of missing changelog placeholder, closes `#454`_ (`e7a0e81`_) * Add repo=None when not in git repo, closes `#422`_ (`40be804`_) 📖 Documentation ---------------- * Add example for pyproject.toml (`2a4b8af`_) .. _#422: https://github.com/python-semantic-release/python-semantic-release/issues/422 .. _#454: https://github.com/python-semantic-release/python-semantic-release/issues/454 .. _2a4b8af: https://github.com/python-semantic-release/python-semantic-release/commit/2a4b8af1c2893a769c02476bb92f760c8522bd7a .. 
_40be804: https://github.com/python-semantic-release/python-semantic-release/commit/40be804c09ab8a036fb135c9c38a63f206d2742c .. _e7a0e81: https://github.com/python-semantic-release/python-semantic-release/commit/e7a0e81c004ade73ed927ba4de8c3e3ccaf0047c .. _changelog-v7.31.1: v7.31.1 (2022-07-29) ==================== 🪲 Bug Fixes ------------ * Update git email in action, closes `#473`_ (`0ece6f2`_) .. _#473: https://github.com/python-semantic-release/python-semantic-release/issues/473 .. _0ece6f2: https://github.com/python-semantic-release/python-semantic-release/commit/0ece6f263ff02a17bb1e00e7ed21c490f72e3d00 .. _changelog-v7.31.0: v7.31.0 (2022-07-29) ==================== ✨ Features ----------- * Add prerelease-patch and no-prerelease-patch flags for whether to auto-bump prereleases (`b4e5b62`_) * Override repository_url w REPOSITORY_URL env var (`PR#439`_, `cb7578c`_) 🪲 Bug Fixes ------------ * :bug: fix get_current_release_version for tag_only version_source (`cad09be`_) .. _b4e5b62: https://github.com/python-semantic-release/python-semantic-release/commit/b4e5b626074f969e4140c75fdac837a0625cfbf6 .. _cad09be: https://github.com/python-semantic-release/python-semantic-release/commit/cad09be9ba067f1c882379c0f4b28115a287fc2b .. _cb7578c: https://github.com/python-semantic-release/python-semantic-release/commit/cb7578cf005b8bd65d9b988f6f773e4c060982e3 .. _PR#439: https://github.com/python-semantic-release/python-semantic-release/pull/439 .. _changelog-v7.30.2: v7.30.2 (2022-07-26) ==================== 🪲 Bug Fixes ------------ * Declare additional_options as action inputs (`PR#481`_, `cb5d8c7`_) .. _cb5d8c7: https://github.com/python-semantic-release/python-semantic-release/commit/cb5d8c7ce7d013fcfabd7696b5ffb846a8a6f853 .. _PR#481: https://github.com/python-semantic-release/python-semantic-release/pull/481 .. 
_changelog-v7.30.1: v7.30.1 (2022-07-25) ==================== 🪲 Bug Fixes ------------ * Don't use commit_subject for tag pattern matching (`PR#480`_, `ac3f11e`_) .. _ac3f11e: https://github.com/python-semantic-release/python-semantic-release/commit/ac3f11e689f4a290d20b68b9c5c214098eb61b5f .. _PR#480: https://github.com/python-semantic-release/python-semantic-release/pull/480 .. _changelog-v7.30.0: v7.30.0 (2022-07-25) ==================== ✨ Features ----------- * Add ``additional_options`` input for GitHub Action (`PR#477`_, `aea60e3`_) 🪲 Bug Fixes ------------ * Allow empty additional options (`PR#479`_, `c9b2514`_) .. _aea60e3: https://github.com/python-semantic-release/python-semantic-release/commit/aea60e3d290c6fe3137bff21e0db1ed936233776 .. _c9b2514: https://github.com/python-semantic-release/python-semantic-release/commit/c9b2514d3e164b20e78b33f60989d78c2587e1df .. _PR#477: https://github.com/python-semantic-release/python-semantic-release/pull/477 .. _PR#479: https://github.com/python-semantic-release/python-semantic-release/pull/479 .. _changelog-v7.29.7: v7.29.7 (2022-07-24) ==================== 🪲 Bug Fixes ------------ * Ignore dependency version bumps when parsing version from commit logs (`PR#476`_, `51bcb78`_) .. _51bcb78: https://github.com/python-semantic-release/python-semantic-release/commit/51bcb780a9f55fadfaf01612ff65c1f92642c2c1 .. _PR#476: https://github.com/python-semantic-release/python-semantic-release/pull/476 .. _changelog-v7.29.6: v7.29.6 (2022-07-15) ==================== 🪲 Bug Fixes ------------ * Allow changing prerelease tag using CLI flags (`PR#466`_, `395bf4f`_) .. _395bf4f: https://github.com/python-semantic-release/python-semantic-release/commit/395bf4f2de73663c070f37cced85162d41934213 .. _PR#466: https://github.com/python-semantic-release/python-semantic-release/pull/466 .. 
_changelog-v7.29.5: v7.29.5 (2022-07-14) ==================== 🪲 Bug Fixes ------------ * Add packaging module requirement (`PR#469`_, `b99c9fa`_) * **publish**: Get version bump for current release (`PR#467`_, `dd26888`_) .. _b99c9fa: https://github.com/python-semantic-release/python-semantic-release/commit/b99c9fa88dc25e5ceacb131cd93d9079c4fb2c86 .. _dd26888: https://github.com/python-semantic-release/python-semantic-release/commit/dd26888a923b2f480303c19f1916647de48b02bf .. _PR#467: https://github.com/python-semantic-release/python-semantic-release/pull/467 .. _PR#469: https://github.com/python-semantic-release/python-semantic-release/pull/469 .. _changelog-v7.29.4: v7.29.4 (2022-06-29) ==================== 🪲 Bug Fixes ------------ * Add text for empty ValueError (`PR#461`_, `733254a`_) .. _733254a: https://github.com/python-semantic-release/python-semantic-release/commit/733254a99320d8c2f964d799ac4ec29737867faa .. _PR#461: https://github.com/python-semantic-release/python-semantic-release/pull/461 .. _changelog-v7.29.3: v7.29.3 (2022-06-26) ==================== 🪲 Bug Fixes ------------ * Ensure that assets can be uploaded successfully on custom GitHub servers (`PR#458`_, `32b516d`_) .. _32b516d: https://github.com/python-semantic-release/python-semantic-release/commit/32b516d7aded4afcafe4aa56d6a5a329b3fc371d .. _PR#458: https://github.com/python-semantic-release/python-semantic-release/pull/458 .. _changelog-v7.29.2: v7.29.2 (2022-06-20) ==================== 🪲 Bug Fixes ------------ * Ensure should_bump checks against release version if not prerelease (`PR#457`_, `da0606f`_) .. _da0606f: https://github.com/python-semantic-release/python-semantic-release/commit/da0606f0d67ada5f097c704b9423ead3b5aca6b2 .. _PR#457: https://github.com/python-semantic-release/python-semantic-release/pull/457 .. 
_changelog-v7.29.1: v7.29.1 (2022-06-01) ==================== 🪲 Bug Fixes ------------ * Capture correct release version when patch has more than one digit (`PR#448`_, `426cdc7`_) .. _426cdc7: https://github.com/python-semantic-release/python-semantic-release/commit/426cdc7d7e0140da67f33b6853af71b2295aaac2 .. _PR#448: https://github.com/python-semantic-release/python-semantic-release/pull/448 .. _changelog-v7.29.0: v7.29.0 (2022-05-27) ==================== ✨ Features ----------- * Allow using ssh-key to push version while using token to publish to hvcs (`PR#419`_, `7b2dffa`_) * **config**: Add ignore_token_for_push param (`PR#419`_, `7b2dffa`_) 🪲 Bug Fixes ------------ * Fix and refactor prerelease (`PR#435`_, `94c9494`_) * **test**: Override GITHUB_ACTOR env (`PR#419`_, `7b2dffa`_) 📖 Documentation ---------------- * Add documentation for ignore_token_for_push (`PR#419`_, `7b2dffa`_) .. _7b2dffa: https://github.com/python-semantic-release/python-semantic-release/commit/7b2dffadf43c77d5e0eea307aefcee5c7744df5c .. _94c9494: https://github.com/python-semantic-release/python-semantic-release/commit/94c94942561f85f48433c95fd3467e03e0893ab4 .. _PR#419: https://github.com/python-semantic-release/python-semantic-release/pull/419 .. _PR#435: https://github.com/python-semantic-release/python-semantic-release/pull/435 .. _changelog-v7.28.1: v7.28.1 (2022-04-14) ==================== 🪲 Bug Fixes ------------ * Fix getting current version when ``version_source=tag_only`` (`PR#437`_, `b247936`_) .. _b247936: https://github.com/python-semantic-release/python-semantic-release/commit/b247936a81c0d859a34bf9f17ab8ca6a80488081 .. _PR#437: https://github.com/python-semantic-release/python-semantic-release/pull/437 .. _changelog-v7.28.0: v7.28.0 (2022-04-11) ==================== ✨ Features ----------- * Add ``tag_only`` option for ``version_source``, closes `#354`_ (`PR#436`_, `cf74339`_) .. _#354: https://github.com/python-semantic-release/python-semantic-release/issues/354 .. 
_cf74339: https://github.com/python-semantic-release/python-semantic-release/commit/cf743395456a86c62679c2c0342502af043bfc3b .. _PR#436: https://github.com/python-semantic-release/python-semantic-release/pull/436 .. _changelog-v7.27.1: v7.27.1 (2022-04-03) ==================== 🪲 Bug Fixes ------------ * **prerelease**: Pass prerelease option to get_current_version (`PR#432`_, `aabab0b`_) .. _aabab0b: https://github.com/python-semantic-release/python-semantic-release/commit/aabab0b7ce647d25e0c78ae6566f1132ece9fcb9 .. _PR#432: https://github.com/python-semantic-release/python-semantic-release/pull/432 .. _changelog-v7.27.0: v7.27.0 (2022-03-15) ==================== ✨ Features ----------- * Add git-lfs to docker container (`PR#427`_, `184e365`_) .. _184e365: https://github.com/python-semantic-release/python-semantic-release/commit/184e3653932979b82e5a62b497f2a46cbe15ba87 .. _PR#427: https://github.com/python-semantic-release/python-semantic-release/pull/427 .. _changelog-v7.26.0: v7.26.0 (2022-03-07) ==================== ✨ Features ----------- * **publish-cmd**: add ``--prerelease`` cli flag to enable prerelease versioning (`PR#413`_, `7064265`_) * **version-cmd**: add ``--prerelease`` cli flag to enable prerelease versioning (`PR#413`_, `7064265`_) 📖 Documentation ---------------- * Added basic info about prerelease versioning (`PR#413`_, `7064265`_) .. _7064265: https://github.com/python-semantic-release/python-semantic-release/commit/7064265627a2aba09caa2873d823b594e0e23e77 .. _PR#413: https://github.com/python-semantic-release/python-semantic-release/pull/413 .. _changelog-v7.25.2: v7.25.2 (2022-02-24) ==================== 🪲 Bug Fixes ------------ * **gitea**: Use form-data from asset upload (`PR#421`_, `e011944`_) .. _e011944: https://github.com/python-semantic-release/python-semantic-release/commit/e011944987885f75b80fe16a363f4befb2519a91 .. _PR#421: https://github.com/python-semantic-release/python-semantic-release/pull/421 .. 
_changelog-v7.25.1: v7.25.1 (2022-02-23) ==================== 🪲 Bug Fixes ------------ * **gitea**: Build status and asset upload (`PR#420`_, `57db81f`_) * **gitea**: Handle list build status response (`PR#420`_, `57db81f`_) .. _57db81f: https://github.com/python-semantic-release/python-semantic-release/commit/57db81f4c6b96da8259e3bad9137eaccbcd10f6e .. _PR#420: https://github.com/python-semantic-release/python-semantic-release/pull/420 .. _changelog-v7.25.0: v7.25.0 (2022-02-17) ==================== ✨ Features ----------- * **hvcs**: Add gitea support (`PR#412`_, `b7e7936`_) 📖 Documentation ---------------- * Document tag_commit, closes `#410`_ (`b631ca0`_) .. _#410: https://github.com/python-semantic-release/python-semantic-release/issues/410 .. _b631ca0: https://github.com/python-semantic-release/python-semantic-release/commit/b631ca0a79cb2d5499715d43688fc284cffb3044 .. _b7e7936: https://github.com/python-semantic-release/python-semantic-release/commit/b7e7936331b7939db09abab235c8866d800ddc1a .. _PR#412: https://github.com/python-semantic-release/python-semantic-release/pull/412 .. _changelog-v7.24.0: v7.24.0 (2022-01-24) ==================== ✨ Features ----------- * Include additional changes in release commits (`3e34f95`_) .. _3e34f95: https://github.com/python-semantic-release/python-semantic-release/commit/3e34f957ff5a3ec6e6f984cc4a79a38ce4391ea9 .. _changelog-v7.23.0: v7.23.0 (2021-11-30) ==================== ✨ Features ----------- * Support Github Enterprise server (`b4e01f1`_) .. _b4e01f1: https://github.com/python-semantic-release/python-semantic-release/commit/b4e01f1b7e841263fa84f57f0ac331f7c0b31954 .. _changelog-v7.22.0: v7.22.0 (2021-11-21) ==================== ✨ Features ----------- * **parser_angular**: Allow customization in parser (`298eebb`_) 🪲 Bug Fixes ------------ * Address PR feedback for ``parser_angular.py`` (`f7bc458`_) .. 
_298eebb: https://github.com/python-semantic-release/python-semantic-release/commit/298eebbfab5c083505036ba1df47a5874a1eed6e .. _f7bc458: https://github.com/python-semantic-release/python-semantic-release/commit/f7bc45841e6a5c762f99f936c292cee25fabcd02 .. _changelog-v7.21.0: v7.21.0 (2021-11-21) ==================== ✨ Features ----------- * Use gitlab-ci or github actions env vars, closes `#363`_ (`8ca8dd4`_) 🪲 Bug Fixes ------------ * Remove invalid repository exception (`746b62d`_) .. _#363: https://github.com/python-semantic-release/python-semantic-release/issues/363 .. _746b62d: https://github.com/python-semantic-release/python-semantic-release/commit/746b62d4e207a5d491eecd4ca96d096eb22e3bed .. _8ca8dd4: https://github.com/python-semantic-release/python-semantic-release/commit/8ca8dd40f742f823af147928bd75a9577c50d0fd .. _changelog-v7.20.0: v7.20.0 (2021-11-21) ==================== ✨ Features ----------- * Allow custom environment variable names (`PR#392`_, `372cda3`_) * Rewrite Twine adapter for uploading to artifact repositories (`cfb20af`_) 🪲 Bug Fixes ------------ * Don't use linux commands on windows (`PR#393`_, `5bcccd2`_) * Mypy errors in vcs_helpers (`13ca0fe`_) * Skip removing the build folder if it doesn't exist (`8e79fdc`_) 📖 Documentation ---------------- * Clean typos and add section for repository upload (`1efa18a`_) .. _13ca0fe: https://github.com/python-semantic-release/python-semantic-release/commit/13ca0fe650125be2f5e953f6193fdc4d44d3c75a .. _1efa18a: https://github.com/python-semantic-release/python-semantic-release/commit/1efa18a3a55134d6bc6e4572ab025e24082476cd .. _372cda3: https://github.com/python-semantic-release/python-semantic-release/commit/372cda3497f16ead2209e6e1377d38f497144883 .. _5bcccd2: https://github.com/python-semantic-release/python-semantic-release/commit/5bcccd21cc8be3289db260e645fec8dc6a592abd .. _8e79fdc: https://github.com/python-semantic-release/python-semantic-release/commit/8e79fdc107ffd852a91dfb5473e7bd1dfaba4ee5 .. 
_cfb20af: https://github.com/python-semantic-release/python-semantic-release/commit/cfb20af79a8e25a77aee9ff72deedcd63cb7f62f .. _PR#392: https://github.com/python-semantic-release/python-semantic-release/pull/392 .. _PR#393: https://github.com/python-semantic-release/python-semantic-release/pull/393 .. _changelog-v7.19.2: v7.19.2 (2021-09-04) ==================== 🪲 Bug Fixes ------------ * Fixed ImproperConfig import error (`PR#377`_, `b011a95`_) .. _b011a95: https://github.com/python-semantic-release/python-semantic-release/commit/b011a9595df4240cb190bfb1ab5b6d170e430dfc .. _PR#377: https://github.com/python-semantic-release/python-semantic-release/pull/377 .. _changelog-v7.19.1: v7.19.1 (2021-08-17) ==================== 🪲 Bug Fixes ------------ * Add get_formatted_tag helper instead of hardcoded v-prefix in the git tags (`1a354c8`_) .. _1a354c8: https://github.com/python-semantic-release/python-semantic-release/commit/1a354c86abad77563ebce9a6944256461006f3c7 .. _changelog-v7.19.0: v7.19.0 (2021-08-16) ==================== ✨ Features ----------- * Custom git tag format support (`PR#373`_, `1d76632`_) 📖 Documentation ---------------- * **configuration**: define ``tag_format`` usage & resulting effect (`PR#373`_, `1d76632`_) * **parser**: Documentation for scipy-parser (`45ee34a`_) .. _1d76632: https://github.com/python-semantic-release/python-semantic-release/commit/1d76632043bf0b6076d214a63c92013624f4b95e .. _45ee34a: https://github.com/python-semantic-release/python-semantic-release/commit/45ee34aa21443860a6c2cd44a52da2f353b960bf .. _PR#373: https://github.com/python-semantic-release/python-semantic-release/pull/373 .. _changelog-v7.18.0: v7.18.0 (2021-08-09) ==================== ✨ Features ----------- * Add support for non-prefixed tags (`PR#366`_, `0fee4dd`_) 📖 Documentation ---------------- * Clarify second argument of ParsedCommit (`086ddc2`_) .. 
_086ddc2: https://github.com/python-semantic-release/python-semantic-release/commit/086ddc28f06522453328f5ea94c873bd202ff496 .. _0fee4dd: https://github.com/python-semantic-release/python-semantic-release/commit/0fee4ddb5baaddf85ed6b76e76a04474a5f97d0a .. _PR#366: https://github.com/python-semantic-release/python-semantic-release/pull/366 .. _changelog-v7.17.0: v7.17.0 (2021-08-07) ==================== ✨ Features ----------- * **parser**: Add scipy style parser (`PR#369`_, `51a3921`_) .. _51a3921: https://github.com/python-semantic-release/python-semantic-release/commit/51a39213ea120c4bbd7a57b74d4f0cc3103da9f5 .. _PR#369: https://github.com/python-semantic-release/python-semantic-release/pull/369 .. _changelog-v7.16.4: v7.16.4 (2021-08-03) ==================== 🪲 Bug Fixes ------------ * Correct rendering of gitlab issue references, closes `#358`_ (`07429ec`_) .. _#358: https://github.com/python-semantic-release/python-semantic-release/issues/358 .. _07429ec: https://github.com/python-semantic-release/python-semantic-release/commit/07429ec4a32d32069f25ec77b4bea963bd5d2a00 .. _changelog-v7.16.3: v7.16.3 (2021-07-29) ==================== 🪲 Bug Fixes ------------ * Print right info if token is not set, closes `#360`_ (`PR#361`_, `a275a7a`_) .. _#360: https://github.com/python-semantic-release/python-semantic-release/issues/360 .. _a275a7a: https://github.com/python-semantic-release/python-semantic-release/commit/a275a7a17def85ff0b41d254e4ee42772cce1981 .. _PR#361: https://github.com/python-semantic-release/python-semantic-release/pull/361 .. _changelog-v7.16.2: v7.16.2 (2021-06-25) ==================== 🪲 Bug Fixes ------------ * Use release-api for gitlab (`1ef5cab`_) 📖 Documentation ---------------- * Recommend setting a concurrency group for GitHub Actions (`34b0735`_) * Update trove classifiers to reflect supported versions (`PR#344`_, `7578004`_) .. 
_1ef5cab: https://github.com/python-semantic-release/python-semantic-release/commit/1ef5caba2d8dd0f2647bc51ede0ef7152d8b7b8d .. _34b0735: https://github.com/python-semantic-release/python-semantic-release/commit/34b07357ab3f4f4aa787b71183816ec8aaf334a8 .. _7578004: https://github.com/python-semantic-release/python-semantic-release/commit/7578004ed4b20c2bd553782443dfd77535faa377 .. _PR#344: https://github.com/python-semantic-release/python-semantic-release/pull/344 .. _changelog-v7.16.1: v7.16.1 (2021-06-08) ==================== 🪲 Bug Fixes ------------ * Tomlkit should stay at 0.7.0 (`769a5f3`_) .. _769a5f3: https://github.com/python-semantic-release/python-semantic-release/commit/769a5f31115cdb1f43f19a23fe72b96a8c8ba0fc .. _changelog-v7.16.0: v7.16.0 (2021-06-08) ==================== ✨ Features ----------- * Add option to omit tagging (`PR#341`_, `20603e5`_) .. _20603e5: https://github.com/python-semantic-release/python-semantic-release/commit/20603e53116d4f05e822784ce731b42e8cbc5d8f .. _PR#341: https://github.com/python-semantic-release/python-semantic-release/pull/341 .. _changelog-v7.15.6: v7.15.6 (2021-06-08) ==================== 🪲 Bug Fixes ------------ * Update click and tomlkit (`PR#339`_, `947ea3b`_) .. _947ea3b: https://github.com/python-semantic-release/python-semantic-release/commit/947ea3bc0750735941446cf4a87bae20e750ba12 .. _PR#339: https://github.com/python-semantic-release/python-semantic-release/pull/339 .. _changelog-v7.15.5: v7.15.5 (2021-05-26) ==================== 🪲 Bug Fixes ------------ * Pin tomlkit to 0.7.0 (`2cd0db4`_) .. _2cd0db4: https://github.com/python-semantic-release/python-semantic-release/commit/2cd0db4537bb9497b72eb496f6bab003070672ab .. _changelog-v7.15.4: v7.15.4 (2021-04-29) ==================== 🪲 Bug Fixes ------------ * Change log level of failed toml loading, closes `#235`_ (`24bb079`_) .. _#235: https://github.com/python-semantic-release/python-semantic-release/issues/235 .. 
_24bb079: https://github.com/python-semantic-release/python-semantic-release/commit/24bb079cbeff12e7043dd35dd0b5ae03192383bb .. _changelog-v7.15.3: v7.15.3 (2021-04-03) ==================== 🪲 Bug Fixes ------------ * Add venv to path in github action (`583c5a1`_) .. _583c5a1: https://github.com/python-semantic-release/python-semantic-release/commit/583c5a13e40061fc544b82decfe27a6c34f6d265 .. _changelog-v7.15.2: v7.15.2 (2021-04-03) ==================== 🪲 Bug Fixes ------------ * Run semantic-release in virtualenv in the github action, closes `#331`_ (`b508ea9`_) * Set correct path for venv in action script (`aac02b5`_) * Use absolute path for venv in github action (`d4823b3`_) 📖 Documentation ---------------- * Clarify that HVCS should be lowercase, closes `#330`_ (`da0ab0c`_) .. _#330: https://github.com/python-semantic-release/python-semantic-release/issues/330 .. _#331: https://github.com/python-semantic-release/python-semantic-release/issues/331 .. _aac02b5: https://github.com/python-semantic-release/python-semantic-release/commit/aac02b5a44a6959328d5879578aa3536bdf856c2 .. _b508ea9: https://github.com/python-semantic-release/python-semantic-release/commit/b508ea9f411c1cd4f722f929aab9f0efc0890448 .. _d4823b3: https://github.com/python-semantic-release/python-semantic-release/commit/d4823b3b6b1fcd5c33b354f814643c9aaf85a06a .. _da0ab0c: https://github.com/python-semantic-release/python-semantic-release/commit/da0ab0c62c4ce2fa0d815e5558aeec1a1e23bc89 .. _changelog-v7.15.1: v7.15.1 (2021-03-26) ==================== 🪲 Bug Fixes ------------ * Add support for setting build_command to "false", closes `#328`_ (`520cf1e`_) * Upgrade python-gitlab range, closes `#329`_ (`abfacc4`_) 📖 Documentation ---------------- * Add common options to documentation, closes `#327`_ (`20d79a5`_) .. _#327: https://github.com/python-semantic-release/python-semantic-release/issues/327 .. _#328: https://github.com/python-semantic-release/python-semantic-release/issues/328 .. 
_#329: https://github.com/python-semantic-release/python-semantic-release/issues/329 .. _20d79a5: https://github.com/python-semantic-release/python-semantic-release/commit/20d79a51bffa26d40607c1b77d10912992279112 .. _520cf1e: https://github.com/python-semantic-release/python-semantic-release/commit/520cf1eaa7816d0364407dbd17b5bc7c79806086 .. _abfacc4: https://github.com/python-semantic-release/python-semantic-release/commit/abfacc432300941d57488842e41c06d885637e6c .. _changelog-v7.15.0: v7.15.0 (2021-02-18) ==================== ✨ Features ----------- * Allow the use of .pypirc for twine uploads (`PR#325`_, `6bc56b8`_) 📖 Documentation ---------------- * Add documentation for releasing on a Jenkins instance (`PR#324`_, `77ad988`_) .. _6bc56b8: https://github.com/python-semantic-release/python-semantic-release/commit/6bc56b8aa63069a25a828a2d1a9038ecd09b7d5d .. _77ad988: https://github.com/python-semantic-release/python-semantic-release/commit/77ad988a2057be59e4559614a234d6871c06ee37 .. _PR#324: https://github.com/python-semantic-release/python-semantic-release/pull/324 .. _PR#325: https://github.com/python-semantic-release/python-semantic-release/pull/325 .. _changelog-v7.14.0: v7.14.0 (2021-02-11) ==================== ✨ Features ----------- * **checks**: Add support for Jenkins CI (`PR#322`_, `3e99855`_) 📖 Documentation ---------------- * Correct casing on proper nouns (`PR#320`_, `d51b999`_) * Correcting Python casing (`PR#320`_, `d51b999`_) * Correcting Semantic Versioning casing (`PR#320`_, `d51b999`_) .. _3e99855: https://github.com/python-semantic-release/python-semantic-release/commit/3e99855c6bc72b3e9a572c58cc14e82ddeebfff8 .. _d51b999: https://github.com/python-semantic-release/python-semantic-release/commit/d51b999a245a4e56ff7a09d0495c75336f2f150d .. _PR#320: https://github.com/python-semantic-release/python-semantic-release/pull/320 .. _PR#322: https://github.com/python-semantic-release/python-semantic-release/pull/322 .. 
_changelog-v7.13.2: v7.13.2 (2021-01-29) ==================== 🪲 Bug Fixes ------------ * Crash when TOML has no PSR section (`PR#319`_, `5f8ab99`_) * Fix crash when TOML has no PSR section (`PR#319`_, `5f8ab99`_) 📖 Documentation ---------------- * Fix ``version_toml`` example for Poetry (`PR#318`_, `39acb68`_) .. _39acb68: https://github.com/python-semantic-release/python-semantic-release/commit/39acb68bfffe8242040e476893639ba26fa0d6b5 .. _5f8ab99: https://github.com/python-semantic-release/python-semantic-release/commit/5f8ab99bf7254508f4b38fcddef2bdde8dd15a4c .. _PR#318: https://github.com/python-semantic-release/python-semantic-release/pull/318 .. _PR#319: https://github.com/python-semantic-release/python-semantic-release/pull/319 .. _changelog-v7.13.1: v7.13.1 (2021-01-26) ==================== 🪲 Bug Fixes ------------ * Use multiline version_pattern match in replace, closes `#306`_ (`PR#315`_, `1a85af4`_) .. _#306: https://github.com/python-semantic-release/python-semantic-release/issues/306 .. _1a85af4: https://github.com/python-semantic-release/python-semantic-release/commit/1a85af434325ce52e11b49895e115f7a936e417e .. _PR#315: https://github.com/python-semantic-release/python-semantic-release/pull/315 .. _changelog-v7.13.0: v7.13.0 (2021-01-26) ==================== ✨ Features ----------- * Support toml files for version declaration, closes `#245`_, `#275`_ (`PR#307`_, `9b62a7e`_) .. _#245: https://github.com/python-semantic-release/python-semantic-release/issues/245 .. _#275: https://github.com/python-semantic-release/python-semantic-release/issues/275 .. _9b62a7e: https://github.com/python-semantic-release/python-semantic-release/commit/9b62a7e377378667e716384684a47cdf392093fa .. _PR#307: https://github.com/python-semantic-release/python-semantic-release/pull/307 .. 
_changelog-v7.12.0: v7.12.0 (2021-01-25) ==================== ✨ Features ----------- * **github**: Retry GitHub API requests on failure (`PR#314`_, `ac241ed`_) 🪲 Bug Fixes ------------ * **github**: Add retries to github API requests (`PR#314`_, `ac241ed`_) 📖 Documentation ---------------- * **actions**: Pat must be passed to checkout step too, closes `#311`_ (`e2d8e47`_) .. _#311: https://github.com/python-semantic-release/python-semantic-release/issues/311 .. _ac241ed: https://github.com/python-semantic-release/python-semantic-release/commit/ac241edf4de39f4fc0ff561a749fa85caaf9e2ae .. _e2d8e47: https://github.com/python-semantic-release/python-semantic-release/commit/e2d8e47d2b02860881381318dcc088e150c0fcde .. _PR#314: https://github.com/python-semantic-release/python-semantic-release/pull/314 .. _changelog-v7.11.0: v7.11.0 (2021-01-08) ==================== ✨ Features ----------- * **print-version**: Add print-version command to output version (`512e3d9`_) 🪲 Bug Fixes ------------ * Add dot to --define option help (`eb4107d`_) * Avoid Unknown bump level 0 message (`8ab624c`_) * **actions**: Fix github actions with new main location (`6666672`_) ⚙️ Build System ---------------- * Add __main__.py magic file (`e93f36a`_) .. _512e3d9: https://github.com/python-semantic-release/python-semantic-release/commit/512e3d92706055bdf8d08b7c82927d3530183079 .. _6666672: https://github.com/python-semantic-release/python-semantic-release/commit/6666672d3d97ab7cdf47badfa3663f1a69c2dbdf .. _8ab624c: https://github.com/python-semantic-release/python-semantic-release/commit/8ab624cf3508b57a9656a0a212bfee59379d6f8b .. _e93f36a: https://github.com/python-semantic-release/python-semantic-release/commit/e93f36a7a10e48afb42c1dc3d860a5e2a07cf353 .. _eb4107d: https://github.com/python-semantic-release/python-semantic-release/commit/eb4107d2efdf8c885c8ae35f48f1b908d1fced32 .. 
_changelog-v7.10.0: v7.10.0 (2021-01-08) ==================== ✨ Features ----------- * **build**: Allow falsy values for build_command to disable build step (`c07a440`_) 📖 Documentation ---------------- * Fix incorrect reference syntax (`42027f0`_) * Rewrite getting started page (`97a9046`_) .. _42027f0: https://github.com/python-semantic-release/python-semantic-release/commit/42027f0d2bb64f4c9eaec65112bf7b6f67568e60 .. _97a9046: https://github.com/python-semantic-release/python-semantic-release/commit/97a90463872502d1207890ae1d9dd008b1834385 .. _c07a440: https://github.com/python-semantic-release/python-semantic-release/commit/c07a440f2dfc45a2ad8f7c454aaac180c4651f70 .. _changelog-v7.9.0: v7.9.0 (2020-12-21) =================== ✨ Features ----------- * **hvcs**: Add hvcs_domain config option, closes `#277`_ (`ab3061a`_) 🪲 Bug Fixes ------------ * **history**: Coerce version to string (`PR#298`_, `d4cdc3d`_) * **history**: Require semver >= 2.10 (`5087e54`_) .. _#277: https://github.com/python-semantic-release/python-semantic-release/issues/277 .. _5087e54: https://github.com/python-semantic-release/python-semantic-release/commit/5087e549399648cf2e23339a037b33ca8b62d954 .. _ab3061a: https://github.com/python-semantic-release/python-semantic-release/commit/ab3061ae93c49d71afca043b67b361e2eb2919e6 .. _d4cdc3d: https://github.com/python-semantic-release/python-semantic-release/commit/d4cdc3d3cd2d93f2a78f485e3ea107ac816c7d00 .. _PR#298: https://github.com/python-semantic-release/python-semantic-release/pull/298 .. _changelog-v7.8.2: v7.8.2 (2020-12-19) =================== ✨ Features ----------- * **repository**: Add to settings artifact repository (`f4ef373`_) 🪲 Bug Fixes ------------ * **cli**: Skip remove_dist where not needed (`04817d4`_) .. _04817d4: https://github.com/python-semantic-release/python-semantic-release/commit/04817d4ecfc693195e28c80455bfbb127485f36b .. 
_f4ef373: https://github.com/python-semantic-release/python-semantic-release/commit/f4ef3733b948282fba5a832c5c0af134609b26d2 .. _changelog-v7.8.1: v7.8.1 (2020-12-18) =================== 🪲 Bug Fixes ------------ * Filenames with unknown mimetype are now properly uploaded to github release (`f3ece78`_) * **logs**: Fix TypeError when enabling debug logs (`2591a94`_) .. _2591a94: https://github.com/python-semantic-release/python-semantic-release/commit/2591a94115114c4a91a48f5b10b3954f6ac932a1 .. _f3ece78: https://github.com/python-semantic-release/python-semantic-release/commit/f3ece78b2913e70f6b99907b192a1e92bbfd6b77 .. _changelog-v7.8.0: v7.8.0 (2020-12-18) =================== ✨ Features ----------- * Add ``upload_to_pypi_glob_patterns`` option (`42305ed`_) 🪲 Bug Fixes ------------ * **changelog**: Use "issues" link vs "pull" (`93e48c9`_) * **netrc**: Prefer using token defined in GH_TOKEN instead of .netrc file (`3af32a7`_) .. _3af32a7: https://github.com/python-semantic-release/python-semantic-release/commit/3af32a738f2f2841fd75ec961a8f49a0b1c387cf .. _42305ed: https://github.com/python-semantic-release/python-semantic-release/commit/42305ed499ca08c819c4e7e65fcfbae913b8e6e1 .. _93e48c9: https://github.com/python-semantic-release/python-semantic-release/commit/93e48c992cb8b763f430ecbb0b7f9c3ca00036e4 .. _changelog-v7.7.0: v7.7.0 (2020-12-12) =================== ✨ Features ----------- * **changelog**: Add PR links in markdown (`PR#282`_, `0448f6c`_) .. _0448f6c: https://github.com/python-semantic-release/python-semantic-release/commit/0448f6c350bbbf239a81fe13dc5f45761efa7673 .. _PR#282: https://github.com/python-semantic-release/python-semantic-release/pull/282 .. _changelog-v7.6.0: v7.6.0 (2020-12-06) =================== ✨ Features ----------- * Add ``major_on_zero`` option (`d324154`_) 📖 Documentation ---------------- * Add documentation for option ``major_on_zero`` (`2e8b26e`_) .. 
_2e8b26e: https://github.com/python-semantic-release/python-semantic-release/commit/2e8b26e4ee0316a2cf2a93c09c783024fcd6b3ba .. _d324154: https://github.com/python-semantic-release/python-semantic-release/commit/d3241540e7640af911eb24c71e66468feebb0d46 .. _changelog-v7.5.0: v7.5.0 (2020-12-04) =================== ✨ Features ----------- * **logs**: Include scope in changelogs (`PR#281`_, `21c96b6`_) .. _21c96b6: https://github.com/python-semantic-release/python-semantic-release/commit/21c96b688cc44cc6f45af962ffe6d1f759783f37 .. _PR#281: https://github.com/python-semantic-release/python-semantic-release/pull/281 .. _changelog-v7.4.1: v7.4.1 (2020-12-04) =================== 🪲 Bug Fixes ------------ * Add "changelog_capitalize" to flags, closes `#278`_ (`PR#279`_, `37716df`_) .. _#278: https://github.com/python-semantic-release/python-semantic-release/issues/278 .. _37716df: https://github.com/python-semantic-release/python-semantic-release/commit/37716dfa78eb3f848f57a5100d01d93f5aafc0bf .. _PR#279: https://github.com/python-semantic-release/python-semantic-release/pull/279 .. _changelog-v7.4.0: v7.4.0 (2020-11-24) =================== ✨ Features ----------- * Add changelog_capitalize configuration, closes `#260`_ (`7cacca1`_) 📖 Documentation ---------------- * Fix broken internal references (`PR#270`_, `da20b9b`_) * Update links to Github docs (`PR#268`_, `c53162e`_) .. _#260: https://github.com/python-semantic-release/python-semantic-release/issues/260 .. _7cacca1: https://github.com/python-semantic-release/python-semantic-release/commit/7cacca1eb436a7166ba8faf643b53c42bc32a6a7 .. _c53162e: https://github.com/python-semantic-release/python-semantic-release/commit/c53162e366304082a3bd5d143b0401da6a16a263 .. _da20b9b: https://github.com/python-semantic-release/python-semantic-release/commit/da20b9bdd3c7c87809c25ccb2a5993a7ea209a22 .. _PR#268: https://github.com/python-semantic-release/python-semantic-release/pull/268 .. 
_PR#270: https://github.com/python-semantic-release/python-semantic-release/pull/270 .. _changelog-v7.3.0: v7.3.0 (2020-09-28) =================== ✨ Features ----------- * Generate ``changelog.md`` file (`PR#266`_, `2587dfe`_) 📖 Documentation ---------------- * Fix docstring (`5a5e2cf`_) .. _2587dfe: https://github.com/python-semantic-release/python-semantic-release/commit/2587dfed71338ec6c816f58cdf0882382c533598 .. _5a5e2cf: https://github.com/python-semantic-release/python-semantic-release/commit/5a5e2cfb5e6653fb2e95e6e23e56559953b2c2b4 .. _PR#266: https://github.com/python-semantic-release/python-semantic-release/pull/266 .. _changelog-v7.2.5: v7.2.5 (2020-09-16) =================== 🪲 Bug Fixes ------------ * Add required to inputs in action metadata (`PR#264`_, `e76b255`_) .. _e76b255: https://github.com/python-semantic-release/python-semantic-release/commit/e76b255cf7d3d156e3314fc28c54d63fa126e973 .. _PR#264: https://github.com/python-semantic-release/python-semantic-release/pull/264 .. _changelog-v7.2.4: v7.2.4 (2020-09-14) =================== 🪲 Bug Fixes ------------ * Use range for toml dependency, closes `#241`_ (`45707e1`_) .. _#241: https://github.com/python-semantic-release/python-semantic-release/issues/241 .. _45707e1: https://github.com/python-semantic-release/python-semantic-release/commit/45707e1b7dcab48103a33de9d7f9fdb5a34dae4a .. _changelog-v7.2.3: v7.2.3 (2020-09-12) =================== 🪲 Bug Fixes ------------ * Support multiline version_pattern matching by default (`82f7849`_) 📖 Documentation ---------------- * Create 'getting started' instructions (`PR#256`_, `5f4d000`_) * Link to getting started guide in README (`f490e01`_) .. _5f4d000: https://github.com/python-semantic-release/python-semantic-release/commit/5f4d000c3f153d1d23128acf577e389ae879466e .. _82f7849: https://github.com/python-semantic-release/python-semantic-release/commit/82f7849dcf29ba658e0cb3b5d21369af8bf3c16f .. 
_f490e01: https://github.com/python-semantic-release/python-semantic-release/commit/f490e0194fa818db4d38c185bc5e6245bfde546b .. _PR#256: https://github.com/python-semantic-release/python-semantic-release/pull/256 .. _changelog-v7.2.2: v7.2.2 (2020-07-26) =================== 🪲 Bug Fixes ------------ * **changelog**: Send changelog to stdout, closes `#250`_ (`87e2bb8`_) 📖 Documentation ---------------- * Add quotation marks to the pip commands in CONTRIBUTING.rst (`PR#253`_, `e20fa43`_) .. _#250: https://github.com/python-semantic-release/python-semantic-release/issues/250 .. _87e2bb8: https://github.com/python-semantic-release/python-semantic-release/commit/87e2bb881387ff3ac245ab9923347a5a616e197b .. _e20fa43: https://github.com/python-semantic-release/python-semantic-release/commit/e20fa43098c06f5f585c81b9cd7e287dcce3fb5d .. _PR#253: https://github.com/python-semantic-release/python-semantic-release/pull/253 .. _changelog-v7.2.1: v7.2.1 (2020-06-29) =================== 🪲 Bug Fixes ------------ * Commit all files with bumped versions (`PR#249`_, `b3a1766`_) 📖 Documentation ---------------- * Give example of multiple build commands (`PR#248`_, `65f1ffc`_) .. _65f1ffc: https://github.com/python-semantic-release/python-semantic-release/commit/65f1ffcc6cac3bf382f4b821ff2be59d04f9f867 .. _b3a1766: https://github.com/python-semantic-release/python-semantic-release/commit/b3a1766be7edb7d2eb76f2726d35ab8298688b3b .. _PR#248: https://github.com/python-semantic-release/python-semantic-release/pull/248 .. _PR#249: https://github.com/python-semantic-release/python-semantic-release/pull/249 .. _changelog-v7.2.0: v7.2.0 (2020-06-15) =================== ✨ Features ----------- * Bump versions in multiple files, closes `#175`_ (`PR#246`_, `0ba2c47`_) .. _#175: https://github.com/python-semantic-release/python-semantic-release/issues/175 .. _0ba2c47: https://github.com/python-semantic-release/python-semantic-release/commit/0ba2c473c6e44cc326b3299b6ea3ddde833bdb37 .. 
_PR#246: https://github.com/python-semantic-release/python-semantic-release/pull/246 .. _changelog-v7.1.1: v7.1.1 (2020-05-28) =================== 🪲 Bug Fixes ------------ * **changelog**: Swap sha and message in table changelog (`6741370`_) .. _6741370: https://github.com/python-semantic-release/python-semantic-release/commit/6741370ab09b1706ff6e19b9fbe57b4bddefc70d .. _changelog-v7.1.0: v7.1.0 (2020-05-24) =================== ✨ Features ----------- * **changelog**: Add changelog_table component, closes `#237`_ (`PR#242`_, `fe6a7e7`_) .. _#237: https://github.com/python-semantic-release/python-semantic-release/issues/237 .. _fe6a7e7: https://github.com/python-semantic-release/python-semantic-release/commit/fe6a7e7fa014ffb827a1430dbcc10d1fc84c886b .. _PR#242: https://github.com/python-semantic-release/python-semantic-release/pull/242 .. _changelog-v7.0.0: v7.0.0 (2020-05-22) =================== ✨ Features ----------- * Pass changelog_sections to components (`PR#240`_, `3e17a98`_) * **changelog**: Add changelog components (`PR#240`_, `3e17a98`_) 📖 Documentation ---------------- * Add conda-forge badge (`e9536bb`_) * Add documentation for changelog_components (`PR#240`_, `3e17a98`_) 💥 BREAKING CHANGES ------------------- * **changelog**: The ``compare_url`` option has been removed in favor of using ``changelog_components``. This functionality is now available as the ``semantic_release.changelog.compare_url`` component. .. _3e17a98: https://github.com/python-semantic-release/python-semantic-release/commit/3e17a98d7fa8468868a87e62651ac2c010067711 .. _e9536bb: https://github.com/python-semantic-release/python-semantic-release/commit/e9536bbe119c9e3b90c61130c02468e0e1f14141 .. _PR#240: https://github.com/python-semantic-release/python-semantic-release/pull/240 .. _changelog-v6.4.1: v6.4.1 (2020-05-15) =================== 🪲 Bug Fixes ------------ * Convert ``\r\n`` to ``\n`` in commit messages, closes `#239`_ (`34acbbc`_) .. 
_#239: https://github.com/python-semantic-release/python-semantic-release/issues/239 .. _34acbbc: https://github.com/python-semantic-release/python-semantic-release/commit/34acbbcd25320a9d18dcd1a4f43e1ce1837b2c9f .. _changelog-v6.4.0: v6.4.0 (2020-05-15) =================== ✨ Features ----------- * **history**: Create emoji parser (`PR#238`_, `2e1c50a`_) 🪲 Bug Fixes ------------ * Add emojis to default changelog_sections (`PR#238`_, `2e1c50a`_) * Include all parsed types in changelog (`PR#238`_, `2e1c50a`_) 📖 Documentation ---------------- * Add documentation for emoji parser (`PR#238`_, `2e1c50a`_) ♻️ Refactoring --------------- * **history**: Get breaking changes in parser (`PR#238`_, `2e1c50a`_) .. _2e1c50a: https://github.com/python-semantic-release/python-semantic-release/commit/2e1c50a865628b372f48945a039a3edb38a7cdf0 .. _PR#238: https://github.com/python-semantic-release/python-semantic-release/pull/238 .. _changelog-v6.3.1: v6.3.1 (2020-05-11) =================== 🪲 Bug Fixes ------------ * Use getboolean for commit_version_number, closes `#186`_ (`a60e0b4`_) .. _#186: https://github.com/python-semantic-release/python-semantic-release/issues/186 .. _a60e0b4: https://github.com/python-semantic-release/python-semantic-release/commit/a60e0b4e3cadf310c3e0ad67ebeb4e69d0ee50cb .. _changelog-v6.3.0: v6.3.0 (2020-05-09) =================== ✨ Features ----------- * **history**: Support linking compare page in changelog, closes `#218`_ (`79a8e02`_) 📖 Documentation ---------------- * Document compare_link option (`e52c355`_) * Rewrite commit-log-parsing.rst (`4c70f4f`_) .. _#218: https://github.com/python-semantic-release/python-semantic-release/issues/218 .. _4c70f4f: https://github.com/python-semantic-release/python-semantic-release/commit/4c70f4f2aa3343c966d1b7ab8566fcc782242ab9 .. _79a8e02: https://github.com/python-semantic-release/python-semantic-release/commit/79a8e02df82fbc2acecaad9e9ff7368e61df3e54 .. 
_e52c355: https://github.com/python-semantic-release/python-semantic-release/commit/e52c355c0d742ddd2cfa65d42888296942e5bec5 .. _changelog-v6.2.0: v6.2.0 (2020-05-02) =================== ✨ Features ----------- * **history**: Check all paragraphs for breaking changes, closes `#200`_ (`fec08f0`_) 📖 Documentation ---------------- * Add = to verbosity option, closes `#227`_ (`a0f4c9c`_) * Use references where possible, closes `#221`_ (`f38e5d4`_) .. _#200: https://github.com/python-semantic-release/python-semantic-release/issues/200 .. _#221: https://github.com/python-semantic-release/python-semantic-release/issues/221 .. _#227: https://github.com/python-semantic-release/python-semantic-release/issues/227 .. _a0f4c9c: https://github.com/python-semantic-release/python-semantic-release/commit/a0f4c9cd397fcb98f880097319c08160adb3c3e6 .. _f38e5d4: https://github.com/python-semantic-release/python-semantic-release/commit/f38e5d4a1597cddb69ce47a4d79b8774e796bf41 .. _fec08f0: https://github.com/python-semantic-release/python-semantic-release/commit/fec08f0dbd7ae15f95ca9c41a02c9fe6d448ede0 .. _changelog-v6.1.0: v6.1.0 (2020-04-26) =================== ✨ Features ----------- * **actions**: Support PYPI_TOKEN on GitHub Actions (`df2c080`_) * **pypi**: Support easier use of API tokens, closes `#213`_ (`bac135c`_) 📖 Documentation ---------------- * Add documentation for PYPI_TOKEN (`a8263a0`_) .. _#213: https://github.com/python-semantic-release/python-semantic-release/issues/213 .. _a8263a0: https://github.com/python-semantic-release/python-semantic-release/commit/a8263a066177d1d42f2844e4cb42a76a23588500 .. _bac135c: https://github.com/python-semantic-release/python-semantic-release/commit/bac135c0ae7a6053ecfc7cdf2942c3c89640debf .. _df2c080: https://github.com/python-semantic-release/python-semantic-release/commit/df2c0806f0a92186e914cfc8cc992171d74422df .. 
_changelog-v6.0.1: v6.0.1 (2020-04-15) =================== 🪲 Bug Fixes ------------ * **hvcs**: Convert get_hvcs to use LoggedFunction (`3084249`_) .. _3084249: https://github.com/python-semantic-release/python-semantic-release/commit/308424933fd3375ca3730d9eaf8abbad2435830b .. _changelog-v6.0.0: v6.0.0 (2020-04-15) =================== 📖 Documentation ---------------- * Create Read the Docs config file (`aa5a1b7`_) * Include README.rst in index.rst (`8673a9d`_) * Move action.rst into main documentation (`509ccaf`_) * Rewrite README.rst (`e049772`_) * Rewrite troubleshooting page (`0285de2`_) ♻️ Refactoring --------------- * **debug**: Use logging and click_log instead of ndebug (`15b1f65`_) 💥 BREAKING CHANGES ------------------- * **debug**: ``debug="*"`` no longer has an effect, instead use ``--verbosity DEBUG``. .. _0285de2: https://github.com/python-semantic-release/python-semantic-release/commit/0285de215a8dac3fcc9a51f555fa45d476a56dff .. _15b1f65: https://github.com/python-semantic-release/python-semantic-release/commit/15b1f650f29761e1ab2a91b767cbff79b2057a4c .. _509ccaf: https://github.com/python-semantic-release/python-semantic-release/commit/509ccaf307a0998eced69ad9fee1807132babe28 .. _8673a9d: https://github.com/python-semantic-release/python-semantic-release/commit/8673a9d92a9bf348bb3409e002a830741396c8ca .. _aa5a1b7: https://github.com/python-semantic-release/python-semantic-release/commit/aa5a1b700a1c461c81c6434686cb6f0504c4bece .. _e049772: https://github.com/python-semantic-release/python-semantic-release/commit/e049772cf14cdd49538cf357db467f0bf3fe9587 .. _changelog-v5.2.0: v5.2.0 (2020-04-09) =================== ✨ Features ----------- * **github**: Add tag as default release name (`2997908`_) 📖 Documentation ---------------- * Automate API docs (`7d4fea2`_) .. _2997908: https://github.com/python-semantic-release/python-semantic-release/commit/2997908f80f4fcec56917d237a079b961a06f990 .. 
_7d4fea2: https://github.com/python-semantic-release/python-semantic-release/commit/7d4fea266cc75007de51609131eb6d1e324da608 .. _changelog-v5.1.0: v5.1.0 (2020-04-04) =================== ✨ Features ----------- * **history**: Allow customizing changelog_sections (`PR#207`_, `d5803d5`_) 📖 Documentation ---------------- * Improve formatting of configuration page (`9a8e22e`_) * Improve formatting of envvars page (`b376a56`_) * Update index.rst (`b27c26c`_) .. _9a8e22e: https://github.com/python-semantic-release/python-semantic-release/commit/9a8e22e838d7dbf3bfd941397c3b39560aca6451 .. _b27c26c: https://github.com/python-semantic-release/python-semantic-release/commit/b27c26c66e7e41843ab29076f7e724908091b46e .. _b376a56: https://github.com/python-semantic-release/python-semantic-release/commit/b376a567bfd407a507ce0752614b0ca75a0f2973 .. _d5803d5: https://github.com/python-semantic-release/python-semantic-release/commit/d5803d5c1668d86482a31ac0853bac7ecfdc63bc .. _PR#207: https://github.com/python-semantic-release/python-semantic-release/pull/207 .. _changelog-v5.0.3: v5.0.3 (2020-03-26) =================== 🪲 Bug Fixes ------------ * Bump dependencies and fix Windows issues on Development (`PR#173`_, `0a6f8c3`_) * Missing mime types on Windows (`PR#173`_, `0a6f8c3`_) .. _0a6f8c3: https://github.com/python-semantic-release/python-semantic-release/commit/0a6f8c3842b05f5f424dad5ce1fa5e3823c7e688 .. _PR#173: https://github.com/python-semantic-release/python-semantic-release/pull/173 .. _changelog-v5.0.2: v5.0.2 (2020-03-22) =================== 🪲 Bug Fixes ------------ * **history**: Leave case of other characters unchanged (`96ba94c`_) .. _96ba94c: https://github.com/python-semantic-release/python-semantic-release/commit/96ba94c4b4593997343ec61ecb6c823c1494d0e2 .. _changelog-v5.0.1: v5.0.1 (2020-03-22) =================== 🪲 Bug Fixes ------------ * Make action use current version of semantic-release (`123984d`_) .. 
_123984d: https://github.com/python-semantic-release/python-semantic-release/commit/123984d735181c622f3d99088a1ad91321192a11 .. _changelog-v5.0.0: v5.0.0 (2020-03-22) =================== ✨ Features ----------- * **build**: Allow config setting for build command, closes `#188`_ (`PR#195`_, `740f4bd`_) 🪲 Bug Fixes ------------ * Rename default of build_command config (`d5db22f`_) 📖 Documentation ---------------- * **pypi**: Update docstrings in pypi.py (`6502d44`_) 💥 BREAKING CHANGES ------------------- * **build**: Previously the build_commands configuration variable set the types of bundles sent to ``python setup.py``. It has been replaced by the configuration variable ``build_command`` which takes the full command e.g. ``python setup.py sdist`` or ``poetry build``. .. _#188: https://github.com/python-semantic-release/python-semantic-release/issues/188 .. _6502d44: https://github.com/python-semantic-release/python-semantic-release/commit/6502d448fa65e5dc100e32595e83fff6f62a881a .. _740f4bd: https://github.com/python-semantic-release/python-semantic-release/commit/740f4bdb26569362acfc80f7e862fc2c750a46dd .. _d5db22f: https://github.com/python-semantic-release/python-semantic-release/commit/d5db22f9f7acd05d20fd60a8b4b5a35d4bbfabb8 .. _PR#195: https://github.com/python-semantic-release/python-semantic-release/pull/195 .. _changelog-v4.11.0: v4.11.0 (2020-03-22) ==================== ✨ Features ----------- * **actions**: Create GitHub Action (`350245d`_) 📖 Documentation ---------------- * Make AUTHORS.rst dynamic (`db2e076`_) * **readme**: Fix minor typo (`c22f69f`_) .. _350245d: https://github.com/python-semantic-release/python-semantic-release/commit/350245dbfb07ed6a1db017b1d9d1072b368b1497 .. _c22f69f: https://github.com/python-semantic-release/python-semantic-release/commit/c22f69f62a215ff65e1ab6dcaa8e7e9662692e64 .. _db2e076: https://github.com/python-semantic-release/python-semantic-release/commit/db2e0762f3189d0f1a6ba29aad32bdefb7e0187f .. 
_changelog-v4.10.0: v4.10.0 (2020-03-03) ==================== ✨ Features ----------- * Make commit message configurable (`PR#184`_, `eb0762c`_) .. _eb0762c: https://github.com/python-semantic-release/python-semantic-release/commit/eb0762ca9fea5cecd5c7b182504912a629be473b .. _PR#184: https://github.com/python-semantic-release/python-semantic-release/pull/184 .. _changelog-v4.9.0: v4.9.0 (2020-03-02) =================== ✨ Features ----------- * **pypi**: Add build_commands config (`22146ea`_) 🪲 Bug Fixes ------------ * **pypi**: Change bdist_wheels to bdist_wheel (`c4db509`_) .. _22146ea: https://github.com/python-semantic-release/python-semantic-release/commit/22146ea4b94466a90d60b94db4cc65f46da19197 .. _c4db509: https://github.com/python-semantic-release/python-semantic-release/commit/c4db50926c03f3d551c8331932c567c7bdaf4f3d .. _changelog-v4.8.0: v4.8.0 (2020-02-28) =================== ✨ Features ----------- * **git**: Add a new config for commit author (`aa2c22c`_) .. _aa2c22c: https://github.com/python-semantic-release/python-semantic-release/commit/aa2c22c469448fe57f02bea67a02f998ce519ac3 .. _changelog-v4.7.1: v4.7.1 (2020-02-28) =================== 🪲 Bug Fixes ------------ * Repair parsing of remotes in the gitlab ci format, closes `#181`_ (`0fddbe2`_) .. _#181: https://github.com/python-semantic-release/python-semantic-release/issues/181 .. _0fddbe2: https://github.com/python-semantic-release/python-semantic-release/commit/0fddbe2fb70d24c09ceddb789a159162a45942dc .. 
_changelog-v4.7.0: v4.7.0 (2020-02-28) =================== ✨ Features ----------- * Upload distribution files to GitHub Releases (`PR#177`_, `e427658`_) * **github**: Upload dists to release (`PR#177`_, `e427658`_) 🪲 Bug Fixes ------------ * Post changelog after PyPI upload (`PR#177`_, `e427658`_) * Support repository owner names containing dots, closes `#179`_ (`a6c4da4`_) * **github**: Fix upload of .whl files (`PR#177`_, `e427658`_) * **github**: Use application/octet-stream for .whl files (`90a7e47`_) 📖 Documentation ---------------- * Document upload_to_release config option (`PR#177`_, `e427658`_) .. _#179: https://github.com/python-semantic-release/python-semantic-release/issues/179 .. _90a7e47: https://github.com/python-semantic-release/python-semantic-release/commit/90a7e476a04d26babc88002e9035cad2ed485b07 .. _a6c4da4: https://github.com/python-semantic-release/python-semantic-release/commit/a6c4da4c0e6bd8a37f64544f7813fa027f5054ed .. _e427658: https://github.com/python-semantic-release/python-semantic-release/commit/e427658e33abf518191498c3142a0f18d3150e07 .. _PR#177: https://github.com/python-semantic-release/python-semantic-release/pull/177 .. _changelog-v4.6.0: v4.6.0 (2020-02-19) =================== ✨ Features ----------- * **history**: Capitalize changelog messages (`1a8e306`_) 🪲 Bug Fixes ------------ * Add more debug statements in logs (`bc931ec`_) * Only overwrite with patch if bump is None, closes `#159`_ (`1daa4e2`_) .. _#159: https://github.com/python-semantic-release/python-semantic-release/issues/159 .. _1a8e306: https://github.com/python-semantic-release/python-semantic-release/commit/1a8e3060b8f6d6362c27903dcfc69d17db5f1d36 .. _1daa4e2: https://github.com/python-semantic-release/python-semantic-release/commit/1daa4e23ec2dd40c6b490849276524264787e24e .. _bc931ec: https://github.com/python-semantic-release/python-semantic-release/commit/bc931ec46795fde4c1ccee004eec83bf73d5de7a .. 
_changelog-v4.5.1: v4.5.1 (2020-02-16) =================== 🪲 Bug Fixes ------------ * **github**: Send token in request header, closes `#167`_ (`be9972a`_) 📖 Documentation ---------------- * Add note about automatic releases in readme (`e606e75`_) * Fix broken list in readme (`7aa572b`_) * Update readme and getting started docs (`07b3208`_) .. _#167: https://github.com/python-semantic-release/python-semantic-release/issues/167 .. _07b3208: https://github.com/python-semantic-release/python-semantic-release/commit/07b3208ff64301e544c4fdcb48314e49078fc479 .. _7aa572b: https://github.com/python-semantic-release/python-semantic-release/commit/7aa572b2a323ddbc69686309226395f40c52b469 .. _be9972a: https://github.com/python-semantic-release/python-semantic-release/commit/be9972a7b1fb183f738fb31bd370adb30281e4d5 .. _e606e75: https://github.com/python-semantic-release/python-semantic-release/commit/e606e7583a30167cf7679c6bcada2f9e768b3abe .. _changelog-v4.5.0: v4.5.0 (2020-02-08) =================== ✨ Features ----------- * **history**: Enable colon defined version, closes `#165`_ (`7837f50`_) 🪲 Bug Fixes ------------ * Remove erroneous submodule (`762bfda`_) * **cli**: --noop flag works when before command, closes `#73`_ (`4fcc781`_) .. _#73: https://github.com/python-semantic-release/python-semantic-release/issues/73 .. _#165: https://github.com/python-semantic-release/python-semantic-release/issues/165 .. _4fcc781: https://github.com/python-semantic-release/python-semantic-release/commit/4fcc781d1a3f9235db552f0f4431c9f5e638d298 .. _762bfda: https://github.com/python-semantic-release/python-semantic-release/commit/762bfda728c266b8cd14671d8da9298fc99c63fb .. _7837f50: https://github.com/python-semantic-release/python-semantic-release/commit/7837f5036269328ef29996b9ea63cccd5a6bc2d5 .. _changelog-v4.4.1: v4.4.1 (2020-01-18) =================== 🪲 Bug Fixes ------------ * Add quotes around twine arguments, closes `#163`_ (`46a83a9`_) .. 
_#163: https://github.com/python-semantic-release/python-semantic-release/issues/163 .. _46a83a9: https://github.com/python-semantic-release/python-semantic-release/commit/46a83a94b17c09d8f686c3ae7b199d7fd0e0e5e5 .. _changelog-v4.4.0: v4.4.0 (2020-01-17) =================== ✨ Features ----------- * **parser**: Add support for exclamation point for breaking changes, closes `#156`_ (`a4f8a10`_) * **parser**: Make BREAKING-CHANGE synonymous with BREAKING CHANGE (`beedccf`_) 🪲 Bug Fixes ------------ * **github**: Add check for GITHUB_ACTOR for git push (`PR#162`_, `c41e9bb`_) .. _#156: https://github.com/python-semantic-release/python-semantic-release/issues/156 .. _a4f8a10: https://github.com/python-semantic-release/python-semantic-release/commit/a4f8a10afcc358a8fbef83be2041129480350be2 .. _beedccf: https://github.com/python-semantic-release/python-semantic-release/commit/beedccfddfb360aeebef595342ee980446012ec7 .. _c41e9bb: https://github.com/python-semantic-release/python-semantic-release/commit/c41e9bb986d01b92d58419cbdc88489d630a11f1 .. _PR#162: https://github.com/python-semantic-release/python-semantic-release/pull/162 .. _changelog-v4.3.4: v4.3.4 (2019-12-17) =================== 🪲 Bug Fixes ------------ * Fallback to whole log if correct tag is not available, closes `#51`_ (`PR#157`_, `252bffd`_) .. _#51: https://github.com/python-semantic-release/python-semantic-release/issues/51 .. _252bffd: https://github.com/python-semantic-release/python-semantic-release/commit/252bffd3be7b6dfcfdb384d24cb1cd83d990fc9a .. _PR#157: https://github.com/python-semantic-release/python-semantic-release/pull/157 .. _changelog-v4.3.3: v4.3.3 (2019-11-06) =================== 🪲 Bug Fixes ------------ * Instead of requiring click 7.0, looks like all tests will pass with at least 2.0. (`PR#155`_, `f07c7f6`_) * Set version of click to >=2.0,<8.0. (`PR#155`_, `f07c7f6`_) * Upgrade to click 7.0, closes `#117`_ (`PR#155`_, `f07c7f6`_) .. 
_#117: https://github.com/python-semantic-release/python-semantic-release/issues/117 .. _f07c7f6: https://github.com/python-semantic-release/python-semantic-release/commit/f07c7f653be1c018e443f071d9a196d9293e9521 .. _PR#155: https://github.com/python-semantic-release/python-semantic-release/pull/155 .. _changelog-v4.3.2: v4.3.2 (2019-10-05) =================== 🪲 Bug Fixes ------------ * Update regex to get repository owner and name for project with dots, closes `#151`_ (`2778e31`_) .. _#151: https://github.com/python-semantic-release/python-semantic-release/issues/151 .. _2778e31: https://github.com/python-semantic-release/python-semantic-release/commit/2778e316a0c0aa931b1012cb3862d04659c05e73 .. _changelog-v4.3.1: v4.3.1 (2019-09-29) =================== 🪲 Bug Fixes ------------ * Support repo urls without git terminator (`700e9f1`_) .. _700e9f1: https://github.com/python-semantic-release/python-semantic-release/commit/700e9f18dafde1833f482272a72bb80b54d56bb3 .. _changelog-v4.3.0: v4.3.0 (2019-09-06) =================== ✨ Features ----------- * Add the possibility to load configuration from pyproject.toml (`35f8bfe`_) * Allow the override of configuration options from cli, closes `#119`_ (`f0ac82f`_) * Allow users to get version from tag and write/commit bump to file, closes `#104`_ (`1f9fe1c`_) * Make the vcs functionalities work with gitlab, closes `#121`_ (`82d555d`_) 🪲 Bug Fixes ------------ * Manage subgroups in git remote url, closes `#139`_, `#140`_ (`4b11875`_) * Update list of commit types to include build, ci and perf, closes `#145`_ (`41ea12f`_) .. _#104: https://github.com/python-semantic-release/python-semantic-release/issues/104 .. _#119: https://github.com/python-semantic-release/python-semantic-release/issues/119 .. _#121: https://github.com/python-semantic-release/python-semantic-release/issues/121 .. _#139: https://github.com/python-semantic-release/python-semantic-release/issues/139 .. 
_#140: https://github.com/python-semantic-release/python-semantic-release/issues/140 .. _#145: https://github.com/python-semantic-release/python-semantic-release/issues/145 .. _1f9fe1c: https://github.com/python-semantic-release/python-semantic-release/commit/1f9fe1cc7666d47cc0c348c4705b63c39bf10ecc .. _35f8bfe: https://github.com/python-semantic-release/python-semantic-release/commit/35f8bfef443c8b69560c918f4b13bc766fb3daa2 .. _41ea12f: https://github.com/python-semantic-release/python-semantic-release/commit/41ea12fa91f97c0046178806bce3be57c3bc2308 .. _4b11875: https://github.com/python-semantic-release/python-semantic-release/commit/4b118754729094e330389712cf863e1c6cefee69 .. _82d555d: https://github.com/python-semantic-release/python-semantic-release/commit/82d555d45b9d9e295ef3f9546a6ca2a38ca4522e .. _f0ac82f: https://github.com/python-semantic-release/python-semantic-release/commit/f0ac82fe59eb59a768a73a1bf2ea934b9d448c58 .. _changelog-v4.2.0: v4.2.0 (2019-08-05) =================== ✨ Features ----------- * Add configuration to customize handling of dists, closes `#115`_ (`2af6f41`_) * Add support for configuring branch, closes `#43`_ (`14abb05`_) * Add support for showing unreleased changelog, closes `#134`_ (`41ef794`_) 🪲 Bug Fixes ------------ * Add commit hash when generating breaking changes, closes `#120`_ (`0c74faf`_) * Kept setting new version for tag source (`0e24a56`_) * Remove deletion of build folder, closes `#115`_ (`b45703d`_) * Updated the tag tests (`3303eef`_) * Upgrade click to 7.0 (`2c5dd80`_) .. _#43: https://github.com/python-semantic-release/python-semantic-release/issues/43 .. _#115: https://github.com/python-semantic-release/python-semantic-release/issues/115 .. _#120: https://github.com/python-semantic-release/python-semantic-release/issues/120 .. _#134: https://github.com/python-semantic-release/python-semantic-release/issues/134 .. 
_0c74faf: https://github.com/python-semantic-release/python-semantic-release/commit/0c74fafdfa81cf2e13db8f4dcf0a6f7347552504 .. _0e24a56: https://github.com/python-semantic-release/python-semantic-release/commit/0e24a5633f8f94b48da97b011634d4f9d84f7b4b .. _14abb05: https://github.com/python-semantic-release/python-semantic-release/commit/14abb05e7f878e88002f896812d66b4ea5c219d4 .. _2af6f41: https://github.com/python-semantic-release/python-semantic-release/commit/2af6f41b21205bdd192514a434fca2feba17725a .. _2c5dd80: https://github.com/python-semantic-release/python-semantic-release/commit/2c5dd809b84c2157a5e6cdcc773c43ec864f0328 .. _3303eef: https://github.com/python-semantic-release/python-semantic-release/commit/3303eefa49a0474bbd85df10ae186ccbf9090ec1 .. _41ef794: https://github.com/python-semantic-release/python-semantic-release/commit/41ef7947ad8a07392c96c7540980476e989c1d83 .. _b45703d: https://github.com/python-semantic-release/python-semantic-release/commit/b45703dad38c29b28575060b21e5fb0f8482c6b1 .. _changelog-v4.1.2: v4.1.2 (2019-08-04) =================== 🪲 Bug Fixes ------------ * Correct isort build fail (`0037210`_) * Make sure the history only breaks loop for version commit, closes `#135`_ (`5dc6cfc`_) * **vcs**: Allow cli to be run from subdirectory (`fb7bb14`_) 📖 Documentation ---------------- * **circleci**: Point badge to master branch (`9c7302e`_) .. _#135: https://github.com/python-semantic-release/python-semantic-release/issues/135 .. _0037210: https://github.com/python-semantic-release/python-semantic-release/commit/00372100b527ff9308d9e43fe5c65cdf179dc4dc .. _5dc6cfc: https://github.com/python-semantic-release/python-semantic-release/commit/5dc6cfc634254f09997bb3cb0f17abd296e2c01f .. _9c7302e: https://github.com/python-semantic-release/python-semantic-release/commit/9c7302e184a1bd88f39b3039691b55cd77f0bb07 .. _fb7bb14: https://github.com/python-semantic-release/python-semantic-release/commit/fb7bb14300e483626464795b8ff4f033a194cf6f .. 
_changelog-v4.1.1: v4.1.1 (2019-02-15) =================== 📖 Documentation ---------------- * Correct usage of changelog (`f4f59b0`_) * Debug usage and related (`f08e594`_) * Describing the commands (`b6fa04d`_) * Update url for commit guidelines. The guidelines can now be found in the DEVELOPERS.md in angular. (`90c1b21`_) .. _90c1b21: https://github.com/python-semantic-release/python-semantic-release/commit/90c1b217f86263301b91d19d641c7b348e37d960 .. _b6fa04d: https://github.com/python-semantic-release/python-semantic-release/commit/b6fa04db3044525a1ee1b5952fb175a706842238 .. _f08e594: https://github.com/python-semantic-release/python-semantic-release/commit/f08e5943a9876f2d17a7c02f468720995c7d9ffd .. _f4f59b0: https://github.com/python-semantic-release/python-semantic-release/commit/f4f59b08c73700c6ee04930221bfcb1355cbc48d .. _changelog-v4.1.0: v4.1.0 (2019-01-31) =================== ✨ Features ----------- * **ci_checks**: Add support for bitbucket (`9fc120d`_) 🪲 Bug Fixes ------------ * Initialize git Repo from current folder (`c7415e6`_) * Maintain version variable formatting on bump (`PR#103`_, `bf63156`_) * Use same changelog code for command as post (`248f622`_) 📖 Documentation ---------------- * Add installation instructions for development (`PR#106`_, `9168d0e`_) * **readme**: Add testing instructions (`bb352f5`_) .. _248f622: https://github.com/python-semantic-release/python-semantic-release/commit/248f62283c59182868c43ff105a66d85c923a894 .. _9168d0e: https://github.com/python-semantic-release/python-semantic-release/commit/9168d0ea56734319a5d77e890f23ff6ba51cc97d .. _9fc120d: https://github.com/python-semantic-release/python-semantic-release/commit/9fc120d1a7e4acbbca609628e72651685108b364 .. _bb352f5: https://github.com/python-semantic-release/python-semantic-release/commit/bb352f5b6616cc42c9f2f2487c51dedda1c68295 .. _bf63156: https://github.com/python-semantic-release/python-semantic-release/commit/bf63156f60340614fae94c255fb2f097cf317b2b .. 
_c7415e6: https://github.com/python-semantic-release/python-semantic-release/commit/c7415e634c0affbe6396e0aa2bafe7c1b3368914 .. _PR#103: https://github.com/python-semantic-release/python-semantic-release/pull/103 .. _PR#106: https://github.com/python-semantic-release/python-semantic-release/pull/106 .. _changelog-v4.0.1: v4.0.1 (2019-01-12) =================== 🪲 Bug Fixes ------------ * Add better error message when pypi credentials are empty, closes `#96`_ (`c4e5dcb`_) * Clean out dist and build before building, closes `#86`_ (`b628e46`_) * Filter out pypi secrets from exceptions, closes `#41`_ (`5918371`_) * Unfreeze dependencies, closes `#100`_ (`847833b`_) * Use correct syntax to exclude tests in package, closes `#92`_ (`3e41e91`_) * **parser_angular**: Fix non-match when special chars in scope (`8a33123`_) 📖 Documentation ---------------- * Remove reference to gitter, closes `#90`_ (`896e37b`_) .. _#41: https://github.com/python-semantic-release/python-semantic-release/issues/41 .. _#86: https://github.com/python-semantic-release/python-semantic-release/issues/86 .. _#90: https://github.com/python-semantic-release/python-semantic-release/issues/90 .. _#92: https://github.com/python-semantic-release/python-semantic-release/issues/92 .. _#96: https://github.com/python-semantic-release/python-semantic-release/issues/96 .. _#100: https://github.com/python-semantic-release/python-semantic-release/issues/100 .. _3e41e91: https://github.com/python-semantic-release/python-semantic-release/commit/3e41e91c318663085cd28c8165ece21d7e383475 .. _5918371: https://github.com/python-semantic-release/python-semantic-release/commit/5918371c1e82b06606087c9945d8eaf2604a0578 .. _847833b: https://github.com/python-semantic-release/python-semantic-release/commit/847833bf48352a4935f906d0c3f75e1db596ca1c .. _896e37b: https://github.com/python-semantic-release/python-semantic-release/commit/896e37b95cc43218e8f593325dd4ea63f8b895d9 .. 
_8a33123: https://github.com/python-semantic-release/python-semantic-release/commit/8a331232621b26767e4268079f9295bf695047ab .. _b628e46: https://github.com/python-semantic-release/python-semantic-release/commit/b628e466f86bc27cbe45ec27a02d4774a0efd3bb .. _c4e5dcb: https://github.com/python-semantic-release/python-semantic-release/commit/c4e5dcbeda0ce8f87d25faefb4d9ae3581029a8f .. _changelog-v4.0.0: v4.0.0 (2018-11-22) =================== ✨ Features ----------- * Add support for commit_message config variable (`4de5400`_) * **CI checks**: Add support for GitLab CI checks, closes `#88`_ (`8df5e2b`_) 🪲 Bug Fixes ------------ * Add check of credentials (`7d945d4`_) * Add credentials check (`0694604`_) * Add dists to twine call (`1cec2df`_) * Change requests from fixed version to version range (`PR#93`_, `af3ad59`_) * Re-add skip-existing (`366e9c1`_) * Remove repository argument in twine (`e24543b`_) * Remove universal from setup config (`18b2402`_) * Update twine (`c4ae7b8`_) * Use new interface for twine (`c04872d`_) * Use twine through cli call (`ab84beb`_) 📖 Documentation ---------------- * Add type hints and more complete docstrings, closes `#81`_ (`a6d5e9b`_) * Fix typo in documentation index (`da6844b`_) ♻️ Refactoring --------------- * Remove support for python 2 (`85fe638`_) 💥 BREAKING CHANGES ------------------- * If you rely on the commit message to be the version number only, this will break your code * This will only work with python 3 after this commit. .. _#81: https://github.com/python-semantic-release/python-semantic-release/issues/81 .. _#88: https://github.com/python-semantic-release/python-semantic-release/issues/88 .. _0694604: https://github.com/python-semantic-release/python-semantic-release/commit/0694604f3b3d2159a4037620605ded09236cdef5 .. _18b2402: https://github.com/python-semantic-release/python-semantic-release/commit/18b24025e397aace03dd5bb9eed46cfdd13491bd .. 
_1cec2df: https://github.com/python-semantic-release/python-semantic-release/commit/1cec2df8bcb7f877c813d6470d454244630b050a .. _366e9c1: https://github.com/python-semantic-release/python-semantic-release/commit/366e9c1d0b9ffcde755407a1de18e8295f6ad3a1 .. _4de5400: https://github.com/python-semantic-release/python-semantic-release/commit/4de540011ab10483ee1865f99c623526cf961bb9 .. _7d945d4: https://github.com/python-semantic-release/python-semantic-release/commit/7d945d44b36b3e8c0b7771570cb2305e9e09d0b2 .. _85fe638: https://github.com/python-semantic-release/python-semantic-release/commit/85fe6384c15db317bc7142f4c8bbf2da58cece58 .. _8df5e2b: https://github.com/python-semantic-release/python-semantic-release/commit/8df5e2bdd33a620e683f3adabe174e94ceaa88d9 .. _a6d5e9b: https://github.com/python-semantic-release/python-semantic-release/commit/a6d5e9b1ccbe75d59e7240528593978a19d8d040 .. _ab84beb: https://github.com/python-semantic-release/python-semantic-release/commit/ab84beb8f809e39ae35cd3ce5c15df698d8712fd .. _af3ad59: https://github.com/python-semantic-release/python-semantic-release/commit/af3ad59f018876e11cc3acdda0b149f8dd5606bd .. _c04872d: https://github.com/python-semantic-release/python-semantic-release/commit/c04872d00a26e9bf0f48eeacb360b37ce0fba01e .. _c4ae7b8: https://github.com/python-semantic-release/python-semantic-release/commit/c4ae7b8ecc682855a8568b247690eaebe62d2d26 .. _da6844b: https://github.com/python-semantic-release/python-semantic-release/commit/da6844bce0070a0020bf13950bd136fe28262602 .. _e24543b: https://github.com/python-semantic-release/python-semantic-release/commit/e24543b96adb208897f4ce3eaab96b2f4df13106 .. _PR#93: https://github.com/python-semantic-release/python-semantic-release/pull/93 .. _changelog-v3.11.2: v3.11.2 (2018-06-10) ==================== 🪲 Bug Fixes ------------ * Upgrade twine (`9722313`_) .. _9722313: https://github.com/python-semantic-release/python-semantic-release/commit/9722313eb63c7e2c32c084ad31bed7ee1c48a928 .. 
_changelog-v3.11.1: v3.11.1 (2018-06-06) ==================== 🪲 Bug Fixes ------------ * Change Gitpython version number, closes `#80`_ (`23c9d4b`_) 📖 Documentation ---------------- * Add retry option to cli docs (`021da50`_) .. _#80: https://github.com/python-semantic-release/python-semantic-release/issues/80 .. _021da50: https://github.com/python-semantic-release/python-semantic-release/commit/021da5001934f3199c98d7cf29f62a3ad8c2e56a .. _23c9d4b: https://github.com/python-semantic-release/python-semantic-release/commit/23c9d4b6a1716e65605ed985881452898d5cf644 .. _changelog-v3.11.0: v3.11.0 (2018-04-12) ==================== ✨ Features ----------- * Add --retry cli option (`PR#78`_, `3e312c0`_) * Add support to finding previous version from tags if not using commit messages (`PR#68`_, `6786487`_) * Be a bit more forgiving to find previous tags (`PR#68`_, `6786487`_) 🪲 Bug Fixes ------------ * Add pytest cache to gitignore (`b8efd5a`_) * Make repo non if it is not a git repository, closes `#74`_ (`1dc306b`_) 📖 Documentation ---------------- * Define ``--retry`` usage (`3e312c0`_) * Remove old notes about trello board (`7f50c52`_) * Update status badges (`cfa13b8`_) .. _#74: https://github.com/python-semantic-release/python-semantic-release/issues/74 .. _1dc306b: https://github.com/python-semantic-release/python-semantic-release/commit/1dc306b9b1db2ac360211bdc61fd815302d0014c .. _3e312c0: https://github.com/python-semantic-release/python-semantic-release/commit/3e312c0ce79a78d25016a3b294b772983cfb5e0f .. _6786487: https://github.com/python-semantic-release/python-semantic-release/commit/6786487ebf4ab481139ef9f43cd74e345debb334 .. _7f50c52: https://github.com/python-semantic-release/python-semantic-release/commit/7f50c521a522bb0c4579332766248778350e205b .. _b8efd5a: https://github.com/python-semantic-release/python-semantic-release/commit/b8efd5a6249c79c8378bffea3e245657e7094ec9 .. 
_cfa13b8: https://github.com/python-semantic-release/python-semantic-release/commit/cfa13b8260e3f3b0bfcb395f828ad63c9c5e3ca5 .. _PR#68: https://github.com/python-semantic-release/python-semantic-release/pull/68 .. _PR#78: https://github.com/python-semantic-release/python-semantic-release/pull/78 .. _changelog-v3.10.3: v3.10.3 (2018-01-29) ==================== 🪲 Bug Fixes ------------ * Error when not in git repository, closes `#74`_ (`PR#75`_, `251b190`_) .. _#74: https://github.com/python-semantic-release/python-semantic-release/issues/74 .. _251b190: https://github.com/python-semantic-release/python-semantic-release/commit/251b190a2fd5df68892346926d447cbc1b32475a .. _PR#75: https://github.com/python-semantic-release/python-semantic-release/pull/75 .. _changelog-v3.10.2: v3.10.2 (2017-08-03) ==================== 🪲 Bug Fixes ------------ * Update call to upload to work with twine 1.9.1 (`PR#72`_, `8f47643`_) .. _8f47643: https://github.com/python-semantic-release/python-semantic-release/commit/8f47643c54996e06c358537115e7e17b77cb02ca .. _PR#72: https://github.com/python-semantic-release/python-semantic-release/pull/72 .. _changelog-v3.10.1: v3.10.1 (2017-07-22) ==================== 🪲 Bug Fixes ------------ * Update Twine (`PR#69`_, `9f268c3`_) .. _9f268c3: https://github.com/python-semantic-release/python-semantic-release/commit/9f268c373a932621771abbe9607b739b1e331409 .. _PR#69: https://github.com/python-semantic-release/python-semantic-release/pull/69 .. _changelog-v3.10.0: v3.10.0 (2017-05-05) ==================== ✨ Features ----------- * Add git hash to the changelog (`PR#65`_, `628170e`_) 🪲 Bug Fixes ------------ * Make changelog problems not fail whole publish (`b5a68cf`_) 📖 Documentation ---------------- * Fix typo in cli.py docstring (`PR#64`_, `0d13985`_) .. _0d13985: https://github.com/python-semantic-release/python-semantic-release/commit/0d139859cd71f2d483f4360f196d6ef7c8726c18 .. 
_628170e: https://github.com/python-semantic-release/python-semantic-release/commit/628170ebc440fc6abf094dd3e393f40576dedf9b .. _b5a68cf: https://github.com/python-semantic-release/python-semantic-release/commit/b5a68cf6177dc0ed80eda722605db064f3fe2062 .. _PR#64: https://github.com/python-semantic-release/python-semantic-release/pull/64 .. _PR#65: https://github.com/python-semantic-release/python-semantic-release/pull/65 .. _changelog-v3.9.0: v3.9.0 (2016-07-03) =================== ✨ Features ----------- * Add option for choosing between versioning by commit or tag (`c0cd1f5`_) * Don't use file to track version, only tag to commit for versioning (`cd25862`_) * Get repo version from historical tags instead of config file (`a45a9bf`_) 🪲 Bug Fixes ------------ * Can't get the proper last tag from commit history (`5a0e681`_) .. _5a0e681: https://github.com/python-semantic-release/python-semantic-release/commit/5a0e681e256ec511cd6c6a8edfee9d905891da10 .. _a45a9bf: https://github.com/python-semantic-release/python-semantic-release/commit/a45a9bfb64538efeb7f6f42bb6e7ede86a4ddfa8 .. _c0cd1f5: https://github.com/python-semantic-release/python-semantic-release/commit/c0cd1f5b2e0776d7b636c3dd9e5ae863125219e6 .. _cd25862: https://github.com/python-semantic-release/python-semantic-release/commit/cd258623ee518c009ae921cd6bb3119dafae43dc .. _changelog-v3.8.1: v3.8.1 (2016-04-17) =================== 🪲 Bug Fixes ------------ * Add search_parent_directories option to gitpython (`PR#62`_, `8bf9ce1`_) .. _8bf9ce1: https://github.com/python-semantic-release/python-semantic-release/commit/8bf9ce11137399906f18bc8b25698b6e03a65034 .. _PR#62: https://github.com/python-semantic-release/python-semantic-release/pull/62 .. 
_changelog-v3.8.0: v3.8.0 (2016-03-21) =================== ✨ Features ----------- * Add ci checks for circle ci (`151d849`_) 🪲 Bug Fixes ------------ * Add git fetch to frigg after success (`74a6cae`_) * Make tag parser work correctly with breaking changes (`9496f6a`_) * Refactoring cli.py to improve --help and error messages (`c79fc34`_) 📖 Documentation ---------------- * Add info about correct commit guidelines (`af35413`_) * Add info about trello board in readme (`5229557`_) * Fix badges in readme (`7f4e549`_) * Update info about releases in contributing.md (`466f046`_) .. _151d849: https://github.com/python-semantic-release/python-semantic-release/commit/151d84964266c8dca206cef8912391cb73c8f206 .. _466f046: https://github.com/python-semantic-release/python-semantic-release/commit/466f0460774cad86e7e828ffb50c7d1332b64e7b .. _5229557: https://github.com/python-semantic-release/python-semantic-release/commit/5229557099d76b3404ea3677292332442a57ae2e .. _74a6cae: https://github.com/python-semantic-release/python-semantic-release/commit/74a6cae2b46c5150e63136fde0599d98b9486e36 .. _7f4e549: https://github.com/python-semantic-release/python-semantic-release/commit/7f4e5493edb6b3fb3510d0bb78fcc8d23434837f .. _9496f6a: https://github.com/python-semantic-release/python-semantic-release/commit/9496f6a502c79ec3acb4e222e190e76264db02cf .. _af35413: https://github.com/python-semantic-release/python-semantic-release/commit/af35413fae80889e2c5fc6b7d28f77f34b3b4c02 .. _c79fc34: https://github.com/python-semantic-release/python-semantic-release/commit/c79fc3469fb99bf4c7f52434fa9c0891bca757f9 .. _changelog-v3.7.2: v3.7.2 (2016-03-19) =================== 🪲 Bug Fixes ------------ * Move code around a bit to make flake8 happy (`41463b4`_) .. _41463b4: https://github.com/python-semantic-release/python-semantic-release/commit/41463b49b5d44fd94c11ab6e0a81e199510fabec .. 
_changelog-v3.7.1: v3.7.1 (2016-03-15) =================== 📖 Documentation ---------------- * **configuration**: Fix typo in setup.cfg section (`725d87d`_) .. _725d87d: https://github.com/python-semantic-release/python-semantic-release/commit/725d87dc45857ef2f9fb331222845ac83a3af135 .. _changelog-v3.7.0: v3.7.0 (2016-01-10) =================== ✨ Features ----------- * Add ci_checks for Frigg CI (`577c374`_) .. _577c374: https://github.com/python-semantic-release/python-semantic-release/commit/577c374396fe303b6fe7d64630d2959998d3595c .. _changelog-v3.6.1: v3.6.1 (2016-01-10) =================== 🪲 Bug Fixes ------------ * Add requests as dependency (`4525a70`_) .. _4525a70: https://github.com/python-semantic-release/python-semantic-release/commit/4525a70d5520b44720d385b0307e46fae77a7463 .. _changelog-v3.6.0: v3.6.0 (2015-12-28) =================== ✨ Features ----------- * Add checks for semaphore, closes `#44`_ (`2d7ef15`_) 📖 Documentation ---------------- * Add documentation for configuring on CI (`7806940`_) * Add note about node semantic release (`0d2866c`_) * Add step by step guide for configuring travis ci (`6f23414`_) * Move automatic-releases to subfolder (`ed68e5b`_) * Remove duplicate readme (`42a9421`_) .. _#44: https://github.com/python-semantic-release/python-semantic-release/issues/44 .. _0d2866c: https://github.com/python-semantic-release/python-semantic-release/commit/0d2866c528098ecaf1dd81492f28d3022a2a54e0 .. _2d7ef15: https://github.com/python-semantic-release/python-semantic-release/commit/2d7ef157b1250459060e99601ec53a00942b6955 .. _42a9421: https://github.com/python-semantic-release/python-semantic-release/commit/42a942131947cd1864c1ba29b184caf072408742 .. _6f23414: https://github.com/python-semantic-release/python-semantic-release/commit/6f2341442f61f0284b1119a2c49e96f0be678929 .. _7806940: https://github.com/python-semantic-release/python-semantic-release/commit/7806940ae36cb0d6ac0f966e5d6d911bd09a7d11 .. 
_ed68e5b: https://github.com/python-semantic-release/python-semantic-release/commit/ed68e5b8d3489463e244b078ecce8eab2cba2bb1 .. _changelog-v3.5.0: v3.5.0 (2015-12-22) =================== ✨ Features ----------- * Add author in commit, closes `#40`_ (`020efaa`_) * Checkout master before publishing (`dc4077a`_) 🪲 Bug Fixes ------------ * Remove " from git push command (`031318b`_) 📖 Documentation ---------------- * Convert readme to rst (`e8a8d26`_) .. _#40: https://github.com/python-semantic-release/python-semantic-release/issues/40 .. _020efaa: https://github.com/python-semantic-release/python-semantic-release/commit/020efaaadf588e3fccd9d2f08a273c37e4158421 .. _031318b: https://github.com/python-semantic-release/python-semantic-release/commit/031318b3268bc37e6847ec049b37425650cebec8 .. _dc4077a: https://github.com/python-semantic-release/python-semantic-release/commit/dc4077a2d07e0522b625336dcf83ee4e0e1640aa .. _e8a8d26: https://github.com/python-semantic-release/python-semantic-release/commit/e8a8d265aa2147824f18065b39a8e7821acb90ec .. _changelog-v3.4.0: v3.4.0 (2015-12-22) =================== ✨ Features ----------- * Add travis environment checks (`f386db7`_) .. _f386db7: https://github.com/python-semantic-release/python-semantic-release/commit/f386db75b77acd521d2f5bde2e1dde99924dc096 .. _changelog-v3.3.3: v3.3.3 (2015-12-22) =================== 🪲 Bug Fixes ------------ * Do git push and git push --tags instead of --follow-tags (`8bc70a1`_) .. _8bc70a1: https://github.com/python-semantic-release/python-semantic-release/commit/8bc70a183fd72f595c72702382bc0b7c3abe99c8 .. _changelog-v3.3.2: v3.3.2 (2015-12-21) =================== 🪲 Bug Fixes ------------ * Change build badge (`0dc068f`_) 📖 Documentation ---------------- * Update docstrings for generate_changelog (`987c6a9`_) .. _0dc068f: https://github.com/python-semantic-release/python-semantic-release/commit/0dc068fff2f8c6914f4abe6c4e5fb2752669159e .. 
_987c6a9: https://github.com/python-semantic-release/python-semantic-release/commit/987c6a96d15997e38c93a9d841c618c76a385ce7 .. _changelog-v3.3.1: v3.3.1 (2015-12-21) =================== 🪲 Bug Fixes ------------ * Add pandoc to travis settings (`17d40a7`_) * Only list commits from the last version tag, closes `#28`_ (`191369e`_) .. _#28: https://github.com/python-semantic-release/python-semantic-release/issues/28 .. _17d40a7: https://github.com/python-semantic-release/python-semantic-release/commit/17d40a73062ffa774542d0abc0f59fc16b68be37 .. _191369e: https://github.com/python-semantic-release/python-semantic-release/commit/191369ebd68526e5b1afcf563f7d13e18c8ca8bf .. _changelog-v3.3.0: v3.3.0 (2015-12-20) =================== ✨ Features ----------- * Add support for environment variables for pypi credentials (`3b383b9`_) 🪲 Bug Fixes ------------ * Add missing parameters to twine.upload (`4bae22b`_) * Better filtering of github token in push error (`9b31da4`_) * Downgrade twine to version 1.5.0 (`66df378`_) * Make sure the github token is not in the output (`55356b7`_) * Push to master by default (`a0bb023`_) .. _3b383b9: https://github.com/python-semantic-release/python-semantic-release/commit/3b383b92376a7530e89b11de481c4dfdfa273f7b .. _4bae22b: https://github.com/python-semantic-release/python-semantic-release/commit/4bae22bae9b9d9abf669b028ea3af4b3813a1df0 .. _55356b7: https://github.com/python-semantic-release/python-semantic-release/commit/55356b718f74d94dd92e6c2db8a15423a6824eb5 .. _66df378: https://github.com/python-semantic-release/python-semantic-release/commit/66df378330448a313aff7a7c27067adda018904f .. _9b31da4: https://github.com/python-semantic-release/python-semantic-release/commit/9b31da4dc27edfb01f685e6036ddbd4c715c9f60 .. _a0bb023: https://github.com/python-semantic-release/python-semantic-release/commit/a0bb023438a1503f9fdb690d976d71632f19a21f .. 
_changelog-v3.2.1: v3.2.1 (2015-12-20) =================== 🪲 Bug Fixes ------------ * Add requirements to manifest (`ed25ecb`_) * **pypi**: Add sdist as default in addition to bdist_wheel (`a1a35f4`_) .. _a1a35f4: https://github.com/python-semantic-release/python-semantic-release/commit/a1a35f43175187091f028474db2ebef5bfc77bc0 .. _ed25ecb: https://github.com/python-semantic-release/python-semantic-release/commit/ed25ecbaeec0e20ad3040452a5547bb7d6faf6ad .. _changelog-v3.2.0: v3.2.0 (2015-12-20) =================== ✨ Features ----------- * **angular-parser**: Remove scope requirement (`90c9d8d`_) * **git**: Add push to GH_TOKEN@github-url (`546b5bf`_) 🪲 Bug Fixes ------------ * **deps**: Use one file for requirements (`4868543`_) .. _4868543: https://github.com/python-semantic-release/python-semantic-release/commit/486854393b24803bb2356324e045ccab17510d46 .. _546b5bf: https://github.com/python-semantic-release/python-semantic-release/commit/546b5bf15466c6f5dfe93c1c03ca34604b0326f2 .. _90c9d8d: https://github.com/python-semantic-release/python-semantic-release/commit/90c9d8d4cd6d43be094cda86579e00b507571f98 .. _changelog-v3.1.0: v3.1.0 (2015-08-31) =================== ✨ Features ----------- * **pypi**: Add option to disable pypi upload (`f5cd079`_) .. _f5cd079: https://github.com/python-semantic-release/python-semantic-release/commit/f5cd079edb219de5ad03a71448d578f5f477da9c .. _changelog-v3.0.0: v3.0.0 (2015-08-25) =================== ✨ Features ----------- * **parser**: Add tag parser (`a7f392f`_) 🪲 Bug Fixes ------------ * **errors**: Add exposing of errors in package (`3662d76`_) * **version**: Parse file instead for version (`005dba0`_) .. _005dba0: https://github.com/python-semantic-release/python-semantic-release/commit/005dba0094eeb4098315ef383a746e139ffb504d .. _3662d76: https://github.com/python-semantic-release/python-semantic-release/commit/3662d7663291859dd58a91b4b4ccde4f0edc99b2 .. 
_a7f392f: https://github.com/python-semantic-release/python-semantic-release/commit/a7f392fd4524cc9207899075631032e438e2593c .. _changelog-v2.1.4: v2.1.4 (2015-08-24) =================== 🪲 Bug Fixes ------------ * **github**: Fix property calls (`7ecdeb2`_) .. _7ecdeb2: https://github.com/python-semantic-release/python-semantic-release/commit/7ecdeb22de96b6b55c5404ebf54a751911c4d8cd .. _changelog-v2.1.3: v2.1.3 (2015-08-22) =================== 🪲 Bug Fixes ------------ * **hvcs**: Make Github.token an property (`37d5e31`_) 📖 Documentation ---------------- * **api**: Update apidocs (`6185380`_) * **parsers**: Add documentation about commit parsers (`9b55422`_) * **readme**: Update readme with information about the changelog command (`56a745e`_) .. _37d5e31: https://github.com/python-semantic-release/python-semantic-release/commit/37d5e3110397596a036def5f1dccf0860964332c .. _56a745e: https://github.com/python-semantic-release/python-semantic-release/commit/56a745ef6fa4edf6f6ba09c78fcc141102cf2871 .. _6185380: https://github.com/python-semantic-release/python-semantic-release/commit/6185380babedbbeab2a2a342f17b4ff3d4df6768 .. _9b55422: https://github.com/python-semantic-release/python-semantic-release/commit/9b554222768036024a133153a559cdfc017c1d91 .. _changelog-v2.1.2: v2.1.2 (2015-08-20) =================== 🪲 Bug Fixes ------------ * **cli**: Fix call to generate_changelog in publish (`5f8bce4`_) .. _5f8bce4: https://github.com/python-semantic-release/python-semantic-release/commit/5f8bce4cbb5e1729e674efd6c651e2531aea2a16 .. _changelog-v2.1.1: v2.1.1 (2015-08-20) =================== 🪲 Bug Fixes ------------ * **history**: Fix issue in get_previous_version (`f961786`_) .. _f961786: https://github.com/python-semantic-release/python-semantic-release/commit/f961786aa3eaa3a620f47cc09243340fd329b9c2 .. 
_changelog-v2.1.0: v2.1.0 (2015-08-20) =================== ✨ Features ----------- * **cli**: Add the possibility to re-post the changelog (`4d028e2`_) 🪲 Bug Fixes ------------ * **cli**: Fix check of token in changelog command (`cc6e6ab`_) * **github**: Fix the github releases integration (`f0c3c1d`_) * **history**: Fix changelog generation (`f010272`_) .. _4d028e2: https://github.com/python-semantic-release/python-semantic-release/commit/4d028e21b9da01be8caac8f23f2c11e0c087e485 .. _cc6e6ab: https://github.com/python-semantic-release/python-semantic-release/commit/cc6e6abe1e91d3aa24e8d73e704829669bea5fd7 .. _f010272: https://github.com/python-semantic-release/python-semantic-release/commit/f01027203a8ca69d21b4aff689e60e8c8d6f9af5 .. _f0c3c1d: https://github.com/python-semantic-release/python-semantic-release/commit/f0c3c1db97752b71f2153ae9f623501b0b8e2c98 .. _changelog-v2.0.0: v2.0.0 (2015-08-19) =================== ✨ Features ----------- * **cli**: Add command for printing the changelog (`336b8bc`_) * **github**: Add github release changelog helper (`da18795`_) * **history**: Add angular parser (`91e4f0f`_) * **history**: Add generate_changelog function (`347f21a`_) * **history**: Add markdown changelog formatter (`d77b58d`_) * **history**: Set angular parser as the default (`c2cf537`_) * **publish**: Add publishing of changelog to github (`74324ba`_) * **settings**: Add loading of current parser (`7bd0916`_) 🪲 Bug Fixes ------------ * **cli**: Change output indentation on changelog (`2ca41d3`_) * **history**: Fix level id's in angular parser (`2918d75`_) * **history**: Fix regex in angular parser (`974ccda`_) * **history**: Support unexpected types in changelog generator (`13deacf`_) 💥 BREAKING CHANGES ------------------- * **history**: The default parser is now angular. Thus, the default behavior of the commit log evaluator will change. From now on it will use the angular commit message spec to determine the new version. .. 
_13deacf: https://github.com/python-semantic-release/python-semantic-release/commit/13deacf5d33ed500e4e94ea702a2a16be2aa7c48 .. _2918d75: https://github.com/python-semantic-release/python-semantic-release/commit/2918d759bf462082280ede971a5222fe01634ed8 .. _2ca41d3: https://github.com/python-semantic-release/python-semantic-release/commit/2ca41d3bd1b8b9d9fe7e162772560e3defe2a41e .. _336b8bc: https://github.com/python-semantic-release/python-semantic-release/commit/336b8bcc01fc1029ff37a79c92935d4b8ea69203 .. _347f21a: https://github.com/python-semantic-release/python-semantic-release/commit/347f21a1f8d655a71a0e7d58b64d4c6bc6d0bf31 .. _74324ba: https://github.com/python-semantic-release/python-semantic-release/commit/74324ba2749cdbbe80a92b5abbecfeab04617699 .. _7bd0916: https://github.com/python-semantic-release/python-semantic-release/commit/7bd0916f87a1f9fe839c853eab05cae1af420cd2 .. _91e4f0f: https://github.com/python-semantic-release/python-semantic-release/commit/91e4f0f4269d01b255efcd6d7121bbfd5a682e12 .. _974ccda: https://github.com/python-semantic-release/python-semantic-release/commit/974ccdad392d768af5e187dabc184be9ac3e133d .. _c2cf537: https://github.com/python-semantic-release/python-semantic-release/commit/c2cf537a42beaa60cd372c7c9f8fb45db8085917 .. _d77b58d: https://github.com/python-semantic-release/python-semantic-release/commit/d77b58db4b66aec94200dccab94f483def4dacc9 .. _da18795: https://github.com/python-semantic-release/python-semantic-release/commit/da187951af31f377ac57fe17462551cfd776dc6e .. _changelog-v1.0.0: v1.0.0 (2015-08-04) =================== 💥 Breaking ----------- * Restructure helpers into history and pypi (`00f64e6`_) 📖 Documentation ---------------- * Add automatic publishing documentation, resolves `#18`_ (`58076e6`_) .. _#18: https://github.com/python-semantic-release/python-semantic-release/issues/18 .. _00f64e6: https://github.com/python-semantic-release/python-semantic-release/commit/00f64e623db0e21470d55488c5081e12d6c11fd3 .. 
_58076e6: https://github.com/python-semantic-release/python-semantic-release/commit/58076e60bf20a5835b112b5e99a86c7425ffe7d9 .. _changelog-v0.9.1: v0.9.1 (2015-08-04) =================== 🪲 Bug Fixes ------------ * Fix ``get_current_head_hash`` to ensure it only returns the hash (`7c28832`_) .. _7c28832: https://github.com/python-semantic-release/python-semantic-release/commit/7c2883209e5bf4a568de60dbdbfc3741d34f38b4 .. _changelog-v0.9.0: v0.9.0 (2015-08-03) =================== ✨ Features ----------- * Add Python 2.7 support, resolves `#10`_ (`c05e13f`_) .. _#10: https://github.com/python-semantic-release/python-semantic-release/issues/10 .. _c05e13f: https://github.com/python-semantic-release/python-semantic-release/commit/c05e13f22163237e963c493ffeda7e140f0202c6 .. _changelog-v0.8.0: v0.8.0 (2015-08-03) =================== ✨ Features ----------- * Add ``check_build_status`` option, resolves `#5`_ (`310bb93`_) * Add ``get_current_head_hash`` in git helpers (`d864282`_) * Add git helper to get owner and name of repo (`f940b43`_) .. _#5: https://github.com/python-semantic-release/python-semantic-release/issues/5 .. _310bb93: https://github.com/python-semantic-release/python-semantic-release/commit/310bb9371673fcf9b7b7be48422b89ab99753f04 .. _d864282: https://github.com/python-semantic-release/python-semantic-release/commit/d864282c498f0025224407b3eeac69522c2a7ca0 .. _f940b43: https://github.com/python-semantic-release/python-semantic-release/commit/f940b435537a3c93ab06170d4a57287546bd8d3b .. _changelog-v0.7.0: v0.7.0 (2015-08-02) =================== ✨ Features ----------- * Add ``patch_without_tag`` option, resolves `#6`_ (`3734a88`_) 📖 Documentation ---------------- * Set up sphinx based documentation, resolves `#1`_ (`41fba78`_) .. _#1: https://github.com/python-semantic-release/python-semantic-release/issues/1 .. _#6: https://github.com/python-semantic-release/python-semantic-release/issues/6 .. 
_3734a88: https://github.com/python-semantic-release/python-semantic-release/commit/3734a889f753f1b9023876e100031be6475a90d1 .. _41fba78: https://github.com/python-semantic-release/python-semantic-release/commit/41fba78a389a8d841316946757a23a7570763c39 .. _changelog-v0.6.0: v0.6.0 (2015-08-02) =================== ✨ Features ----------- * Add twine for uploads to pypi, resolves `#13`_ (`eec2561`_) .. _#13: https://github.com/python-semantic-release/python-semantic-release/issues/13 .. _eec2561: https://github.com/python-semantic-release/python-semantic-release/commit/eec256115b28b0a18136a26d74cfc3232502f1a6 .. _changelog-v0.5.4: v0.5.4 (2015-07-29) =================== 🪲 Bug Fixes ------------ * Add python2 not supported warning (`e84c4d8`_) .. _e84c4d8: https://github.com/python-semantic-release/python-semantic-release/commit/e84c4d8b6f212aec174baccd188185627b5039b6 .. _changelog-v0.5.3: v0.5.3 (2015-07-28) =================== âš™ï¸ Build System --------------- * Add ``wheel`` as a dependency (`971e479`_) .. _971e479: https://github.com/python-semantic-release/python-semantic-release/commit/971e4795a8b8fea371fcc02dc9221f58a0559f32 .. _changelog-v0.5.2: v0.5.2 (2015-07-28) =================== 🪲 Bug Fixes ------------ * Fix python wheel tag (`f9ac163`_) .. _f9ac163: https://github.com/python-semantic-release/python-semantic-release/commit/f9ac163491666022c809ad49846f3c61966e10c1 .. _changelog-v0.5.1: v0.5.1 (2015-07-28) =================== 🪲 Bug Fixes ------------ * Fix push commands (`8374ef6`_) .. _8374ef6: https://github.com/python-semantic-release/python-semantic-release/commit/8374ef6bd78eb564a6d846b882c99a67e116394e .. _changelog-v0.5.0: v0.5.0 (2015-07-28) =================== ✨ Features ----------- * Add setup.py hook for the cli interface (`c363bc5`_) .. _c363bc5: https://github.com/python-semantic-release/python-semantic-release/commit/c363bc5d3cb9e9a113de3cd0c49dd54a5ea9cf35 .. 
_changelog-v0.4.0: v0.4.0 (2015-07-28) =================== ✨ Features ----------- * Add publish command (`d8116c9`_) .. _d8116c9: https://github.com/python-semantic-release/python-semantic-release/commit/d8116c9dec472d0007973939363388d598697784 .. _changelog-v0.3.2: v0.3.2 (2015-07-28) =================== * No change .. _changelog-v0.3.1: v0.3.1 (2015-07-28) =================== 🪲 Bug Fixes ------------ * Fix wheel settings (`1e860e8`_) .. _1e860e8: https://github.com/python-semantic-release/python-semantic-release/commit/1e860e8a4d9ec580449a0b87be9660a9482fa2a4 .. _changelog-v0.3.0: v0.3.0 (2015-07-27) =================== ✨ Features ----------- * Add support for tagging releases (`5f4736f`_) 🪲 Bug Fixes ------------ * Fix issue when version should not change (`441798a`_) .. _441798a: https://github.com/python-semantic-release/python-semantic-release/commit/441798a223195138c0d3d2c51fc916137fef9a6c .. _5f4736f: https://github.com/python-semantic-release/python-semantic-release/commit/5f4736f4e41bc96d36caa76ca58be0e1e7931069 .. _changelog-v0.2.0: v0.2.0 (2015-07-27) =================== ✨ Features ----------- * added no-operation (``--noop``) mode (`44c2039`_) âš™ï¸ Build System --------------- * Swapped pygit2 with gitpython to avoid libgit2 dependency (`8165a2e`_) .. _44c2039: https://github.com/python-semantic-release/python-semantic-release/commit/44c203989aabc9366ba42ed2bc40eaccd7ac891c .. _8165a2e: https://github.com/python-semantic-release/python-semantic-release/commit/8165a2eef2c6eea88bfa52e6db37abc7374cccba .. _changelog-v0.1.1: v0.1.1 (2015-07-27) =================== 🪲 Bug Fixes ------------ * Fix entry point (`bd7ce7f`_) .. _bd7ce7f: https://github.com/python-semantic-release/python-semantic-release/commit/bd7ce7f47c49e2027767fb770024a0d4033299fa .. _changelog-v0.1.0: v0.1.0 (2015-07-27) =================== * Initial Release python-semantic-release-10.4.1/CONTRIBUTING.rst000066400000000000000000000051151506116242600210030ustar00rootroot00000000000000.. 
_contributing_guide: Contributing ------------ If you want to contribute that is awesome. Remember to be nice to others in issues and reviews. Please remember to write tests for the cool things you create or fix. Unsure about something? No worries, `open an issue`_. .. _open an issue: https://github.com/python-semantic-release/python-semantic-release/issues/new Commit messages ~~~~~~~~~~~~~~~ Since python-semantic-release is released with python-semantic-release we need the commit messages to adhere to the `Conventional Commits Specification`_. Although scopes are optional, scopes are expected where applicable. Changes should be committed separately with the commit type they represent, do not combine them all into one commit. If you are unsure how to describe the change correctly just try and ask about it in your pr. If we think it should be something else or there is a pull-request without tags we will help out in adding or changing them. .. _Conventional Commits Specification: https://www.conventionalcommits.org/en/v1.0.0 Releases ~~~~~~~~ This package is released by python-semantic-release on each master build, thus if there are changes that should result in a new release it will happen if the build is green. Development ~~~~~~~~~~~ Install this module and the development dependencies .. code-block:: bash pip install -e .[dev,mypy,test] And if you'd like to build the documentation locally .. code-block:: bash pip install -e .[docs] sphinx-autobuild --open-browser docs docs/_build/html Testing ~~~~~~~ To test your modifications locally: .. 
code-block:: bash # Run type-checking, all tests across all supported Python versions tox # Run all tests for your current installed Python version (with full error output) pytest -vv --comprehensive # Run unit tests for your current installed Python version pytest # or pytest -vv -m unit # Run end-to-end tests for your current installed Python version (with full error output) pytest -vv -m e2e [--comprehensive] The ``--comprehensive`` flag is optional and will run all the variations of tests and it does take significantly longer to run. Building ~~~~~~~~ This project is designed to be versioned and built by itself using the ``tool.semantic_release`` configuration in ``pyproject.toml``. The setting ``tool.semantic_release.build_command`` defines the command to run to build the package. The following is a copy of the ``build_command`` setting which can be run manually to build the package locally: .. code-block:: bash pip install -e .[build] python -m build . python-semantic-release-10.4.1/LICENSE000066400000000000000000000020731506116242600173470ustar00rootroot00000000000000The MIT License (MIT) Copyright (c) 2015 Rolf Erik Lekang Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. python-semantic-release-10.4.1/MANIFEST.in000066400000000000000000000004071506116242600200770ustar00rootroot00000000000000# Make sure non-python files are included graft src/**/data/ # include docs & testing into sdist, ignored for wheel build graft docs/ prune docs/_build/ graft tests/ prune tests/gh_action/ # Remove any generated files prune **/__pycache__/ prune src/*.egg-info python-semantic-release-10.4.1/README.rst000066400000000000000000000015571506116242600200370ustar00rootroot00000000000000Python Semantic Release *********************** *Automating Releases via SemVer and Commit Message Conventions* ---- The official documentation for Python Semantic Release can be found at `python-semantic-release.readthedocs.io`_. GitHub Action ============= When using the Python Semantic Release GitHub Action, it executes the command ``semantic-release version`` using `python-semantic-release`_. The usage information and examples for this GitHub Action is available under the `GitHub Actions section`_ of `python-semantic-release.readthedocs.io`_. .. _python-semantic-release: https://pypi.org/project/python-semantic-release/ .. _python-semantic-release.readthedocs.io: https://python-semantic-release.readthedocs.io/en/stable/ .. 
_GitHub Actions section: https://python-semantic-release.readthedocs.io/en/stable/configuration/automatic-releases/github-actions.html python-semantic-release-10.4.1/action.yml000066400000000000000000000113541506116242600203440ustar00rootroot00000000000000--- name: Python Semantic Release description: Automated Releases via SemVer and Commit Message Conventions branding: color: orange inputs: config_file: default: "" required: false description: | Path to a custom semantic-release configuration file. By default, an empty string will look for a pyproject.toml file in the current directory. This is the same as passing the `-c` or `--config` parameter to semantic-release. directory: default: "." required: false description: Sub-directory to cd into before running semantic-release github_token: required: true description: GitHub token used to push release notes and new commits/tags git_committer_name: required: false description: The human name for the “committer†field git_committer_email: required: false description: The email address for the “committer†field no_operation_mode: default: "false" required: false description: | If set to true, the github action will pass the `--noop` parameter to semantic-release. This will cause semantic-release to run in "no operation" mode. See the documentation for more information on this parameter. Note that, this parameter will not affect the output of the action, so you will still get the version determination decision as output values. ssh_public_signing_key: required: false description: The ssh public key used to sign commits ssh_private_signing_key: required: false description: The ssh private key used to sign commits strict: default: "false" required: false description: | If set to true, the github action will pass the `--strict` parameter to semantic-release. See the documentation for more information on this parameter. 
verbosity: default: "1" required: false description: | Set the verbosity level of the output as the number of -v's to pass to semantic-release. 0 is no extra output, 1 is info level output, 2 is debug output, and 3 is silly debug level of output. # `semantic-release version` command line options prerelease: required: false description: | Force the next version to be a prerelease. Set to "true" or "false". prerelease_token: required: false description: "Force the next version to use this prerelease token, if it is a prerelease" force: required: false description: | Force the next version to be a major release. Must be set to one of "prerelease", "patch", "minor", or "major". commit: required: false description: Whether or not to commit changes locally. Defaults are handled by python-semantic-release internal version command. tag: required: false description: | Whether or not to make a local version tag. Defaults are handled by python-semantic-release internal version command. push: required: false description: | Whether or not to push local commits to the Git repository. See the configuration page for defaults of `semantic-release version` for how the default is determined between push, tag, & commit. changelog: required: false description: | Whether or not to update the changelog. vcs_release: required: false description: | Whether or not to create a release in the remote VCS, if supported build: required: false description: | Whether or not to run the build_command for the project. Defaults are handled by python-semantic-release internal version command. build_metadata: required: false description: | Build metadata to append to the new version outputs: commit_sha: description: | The commit SHA of the release if a release was made, otherwise an empty string is_prerelease: description: | "true" if the version is a prerelease, "false" otherwise link: description: | The link to the release in the remote VCS, if a release was made. 
If no release was made, this will be an empty string. previous_version: description: | The previous version before the release, if a release was or will be made. If no release is detected, this will be the current version or an empty string if no previous version exists. released: description: | "true" if a release was made, "false" otherwise release_notes: description: | The release notes generated by the release, if any. If no release was made, this will be an empty string. tag: description: | The Git tag corresponding to the version output version: description: | The newly released version if one was made, otherwise the current version runs: using: docker image: src/gh_action/Dockerfile python-semantic-release-10.4.1/config/000077500000000000000000000000001506116242600176055ustar00rootroot00000000000000python-semantic-release-10.4.1/config/release-templates/000077500000000000000000000000001506116242600232215ustar00rootroot00000000000000python-semantic-release-10.4.1/config/release-templates/.components/000077500000000000000000000000001506116242600254645ustar00rootroot00000000000000python-semantic-release-10.4.1/config/release-templates/.components/changelog_1.0.0.rst.j2000066400000000000000000000143771506116242600312070ustar00rootroot00000000000000{# This file overrides what would be generated normally because the commits are not conformative to the standard commit message format. #} .. _changelog-v1.0.0: v1.0.0 (2015-08-04) =================== 💥 Breaking ----------- * Restructure helpers into history and pypi (`00f64e6`_) 📖 Documentation ---------------- * Add automatic publishing documentation, resolves `#18`_ (`58076e6`_) .. _#18: https://github.com/python-semantic-release/python-semantic-release/issues/18 .. _00f64e6: https://github.com/python-semantic-release/python-semantic-release/commit/00f64e623db0e21470d55488c5081e12d6c11fd3 .. 
_58076e6: https://github.com/python-semantic-release/python-semantic-release/commit/58076e60bf20a5835b112b5e99a86c7425ffe7d9 .. _changelog-v0.9.1: v0.9.1 (2015-08-04) =================== 🪲 Bug Fixes ------------ * Fix ``get_current_head_hash`` to ensure it only returns the hash (`7c28832`_) .. _7c28832: https://github.com/python-semantic-release/python-semantic-release/commit/7c2883209e5bf4a568de60dbdbfc3741d34f38b4 .. _changelog-v0.9.0: v0.9.0 (2015-08-03) =================== ✨ Features ----------- * Add Python 2.7 support, resolves `#10`_ (`c05e13f`_) .. _#10: https://github.com/python-semantic-release/python-semantic-release/issues/10 .. _c05e13f: https://github.com/python-semantic-release/python-semantic-release/commit/c05e13f22163237e963c493ffeda7e140f0202c6 .. _changelog-v0.8.0: v0.8.0 (2015-08-03) =================== ✨ Features ----------- * Add ``check_build_status`` option, resolves `#5`_ (`310bb93`_) * Add ``get_current_head_hash`` in git helpers (`d864282`_) * Add git helper to get owner and name of repo (`f940b43`_) .. _#5: https://github.com/python-semantic-release/python-semantic-release/issues/5 .. _310bb93: https://github.com/python-semantic-release/python-semantic-release/commit/310bb9371673fcf9b7b7be48422b89ab99753f04 .. _d864282: https://github.com/python-semantic-release/python-semantic-release/commit/d864282c498f0025224407b3eeac69522c2a7ca0 .. _f940b43: https://github.com/python-semantic-release/python-semantic-release/commit/f940b435537a3c93ab06170d4a57287546bd8d3b .. _changelog-v0.7.0: v0.7.0 (2015-08-02) =================== ✨ Features ----------- * Add ``patch_without_tag`` option, resolves `#6`_ (`3734a88`_) 📖 Documentation ---------------- * Set up sphinx based documentation, resolves `#1`_ (`41fba78`_) .. _#1: https://github.com/python-semantic-release/python-semantic-release/issues/1 .. _#6: https://github.com/python-semantic-release/python-semantic-release/issues/6 .. 
_3734a88: https://github.com/python-semantic-release/python-semantic-release/commit/3734a889f753f1b9023876e100031be6475a90d1 .. _41fba78: https://github.com/python-semantic-release/python-semantic-release/commit/41fba78a389a8d841316946757a23a7570763c39 .. _changelog-v0.6.0: v0.6.0 (2015-08-02) =================== ✨ Features ----------- * Add twine for uploads to pypi, resolves `#13`_ (`eec2561`_) .. _#13: https://github.com/python-semantic-release/python-semantic-release/issues/13 .. _eec2561: https://github.com/python-semantic-release/python-semantic-release/commit/eec256115b28b0a18136a26d74cfc3232502f1a6 .. _changelog-v0.5.4: v0.5.4 (2015-07-29) =================== 🪲 Bug Fixes ------------ * Add python2 not supported warning (`e84c4d8`_) .. _e84c4d8: https://github.com/python-semantic-release/python-semantic-release/commit/e84c4d8b6f212aec174baccd188185627b5039b6 .. _changelog-v0.5.3: v0.5.3 (2015-07-28) =================== âš™ï¸ Build System --------------- * Add ``wheel`` as a dependency (`971e479`_) .. _971e479: https://github.com/python-semantic-release/python-semantic-release/commit/971e4795a8b8fea371fcc02dc9221f58a0559f32 .. _changelog-v0.5.2: v0.5.2 (2015-07-28) =================== 🪲 Bug Fixes ------------ * Fix python wheel tag (`f9ac163`_) .. _f9ac163: https://github.com/python-semantic-release/python-semantic-release/commit/f9ac163491666022c809ad49846f3c61966e10c1 .. _changelog-v0.5.1: v0.5.1 (2015-07-28) =================== 🪲 Bug Fixes ------------ * Fix push commands (`8374ef6`_) .. _8374ef6: https://github.com/python-semantic-release/python-semantic-release/commit/8374ef6bd78eb564a6d846b882c99a67e116394e .. _changelog-v0.5.0: v0.5.0 (2015-07-28) =================== ✨ Features ----------- * Add setup.py hook for the cli interface (`c363bc5`_) .. _c363bc5: https://github.com/python-semantic-release/python-semantic-release/commit/c363bc5d3cb9e9a113de3cd0c49dd54a5ea9cf35 .. 
_changelog-v0.4.0: v0.4.0 (2015-07-28) =================== ✨ Features ----------- * Add publish command (`d8116c9`_) .. _d8116c9: https://github.com/python-semantic-release/python-semantic-release/commit/d8116c9dec472d0007973939363388d598697784 .. _changelog-v0.3.2: v0.3.2 (2015-07-28) =================== * No change .. _changelog-v0.3.1: v0.3.1 (2015-07-28) =================== 🪲 Bug Fixes ------------ * Fix wheel settings (`1e860e8`_) .. _1e860e8: https://github.com/python-semantic-release/python-semantic-release/commit/1e860e8a4d9ec580449a0b87be9660a9482fa2a4 .. _changelog-v0.3.0: v0.3.0 (2015-07-27) =================== ✨ Features ----------- * Add support for tagging releases (`5f4736f`_) 🪲 Bug Fixes ------------ * Fix issue when version should not change (`441798a`_) .. _441798a: https://github.com/python-semantic-release/python-semantic-release/commit/441798a223195138c0d3d2c51fc916137fef9a6c .. _5f4736f: https://github.com/python-semantic-release/python-semantic-release/commit/5f4736f4e41bc96d36caa76ca58be0e1e7931069 .. _changelog-v0.2.0: v0.2.0 (2015-07-27) =================== ✨ Features ----------- * added no-operation (``--noop``) mode (`44c2039`_) âš™ï¸ Build System --------------- * Swapped pygit2 with gitpython to avoid libgit2 dependency (`8165a2e`_) .. _44c2039: https://github.com/python-semantic-release/python-semantic-release/commit/44c203989aabc9366ba42ed2bc40eaccd7ac891c .. _8165a2e: https://github.com/python-semantic-release/python-semantic-release/commit/8165a2eef2c6eea88bfa52e6db37abc7374cccba .. _changelog-v0.1.1: v0.1.1 (2015-07-27) =================== 🪲 Bug Fixes ------------ * Fix entry point (`bd7ce7f`_) .. _bd7ce7f: https://github.com/python-semantic-release/python-semantic-release/commit/bd7ce7f47c49e2027767fb770024a0d4033299fa python-semantic-release-10.4.1/config/release-templates/.components/changelog_header.rst.j2000066400000000000000000000004071506116242600317700ustar00rootroot00000000000000.. 
_changelog: {% if ctx.changelog_mode == "update" %}{# # Modified insertion flag to insert a changelog header directly # which convienently puts the insertion flag incognito when reading raw RST #}{{ insertion_flag ~ "\n" }}{% endif %} python-semantic-release-10.4.1/config/release-templates/.components/changelog_init.rst.j2000066400000000000000000000022721506116242600315050ustar00rootroot00000000000000{# This changelog template initializes a full changelog for the project, it follows the following logic: 1. Header 2. Any Unreleased Details (uncommon) 3. all previous releases except the very first release 4. the first release #}{# # # Header #}{% include "changelog_header.rst.j2" -%}{# # # Any Unreleased Details (uncommon) #}{% include "unreleased_changes.rst.j2" -%}{# # # Since this is initialization, we are generating all the previous # # release notes per version. The very first release notes is specialized. # # We also have non-conformative commits, so insert manual write-ups. #}{% if releases | length > 0 %}{% for release in releases %}{% if loop.last %}{{ "\n" }}{% include "first_release.rst.j2" -%}{{ "\n" }}{# #}{% elif release.version == "1.0.0" %}{# # Append 0.1.1 through 1.0.0 non-generated changelog only once #}{{ "\n" }}{% include "changelog_1.0.0.rst.j2" -%}{{ "\n\n" }}{# #}{% elif release.version > "1.0.0" %}{{ "\n" }}{% include "versioned_changes.rst.j2" -%}{{ "\n" }}{% endif %}{% endfor %}{% endif %} python-semantic-release-10.4.1/config/release-templates/.components/changelog_update.rst.j2000066400000000000000000000050541506116242600320250ustar00rootroot00000000000000{# This Update changelog template uses the following logic: 1. Read previous changelog file (ex. project_root/CHANGELOG.md) 2. Split on insertion flag (ex. ) 3. Print top half of previous changelog 3. New Changes (unreleased commits & newly released) 4. 
Print bottom half of previous changelog Note: if a previous file was not found, it does not write anything at the bottom but render does NOT fail #}{% set prev_changelog_contents = prev_changelog_file | read_file | safe %}{% set changelog_parts = prev_changelog_contents.split(insertion_flag, maxsplit=1) %}{# #}{% if changelog_parts | length < 2 %}{# # insertion flag was not found, check if the file was empty or did not exist #}{% if prev_changelog_contents | length > 0 %}{# # File has content but no insertion flag, therefore, file will not be updated #}{{ changelog_parts[0] }}{% else %}{# # File was empty or did not exist, therefore, it will be created from scratch #}{% include "changelog_init.rst.j2" %}{% endif %}{% else %}{# # Previous Changelog Header # - Depending if there is header content, then it will separate the insertion flag # with a newline from header content, otherwise it will just print the insertion flag #}{% set prev_changelog_top = changelog_parts[0] | trim %}{% if prev_changelog_top | length > 0 %}{{ "%s\n\n%s\n" | format(prev_changelog_top, insertion_flag | trim) }}{% else %}{{ "%s\n" | format(insertion_flag | trim) }}{% endif %}{# # Any Unreleased Details (uncommon) #}{% include "unreleased_changes.rst.j2" -%}{# #}{% if releases | length > 0 %}{# # Latest Release Details #}{% set release = releases[0] %}{# #}{% if releases | length == 1 and ctx.mask_initial_release %}{# # First Release detected #}{{ "\n" }}{%- include "first_release.rst.j2" -%}{{ "\n" }}{# #}{% elif release.version.as_semver_tag() ~ " (" not in changelog_parts[1] %}{# # The release version is not already in the changelog so we add it #}{{ "\n" }}{%- include "versioned_changes.rst.j2" -%}{{ "\n" }}{# #}{% endif %}{% endif %}{# # Previous Changelog Footer # - skips printing footer if empty, which happens when the insertion_flag # was at the end of the file (ignoring whitespace) #}{% set previous_changelog_bottom = changelog_parts[1] | trim %}{% if previous_changelog_bottom | 
length > 0 %}{{ "\n%s\n" | format(previous_changelog_bottom) }}{% endif %}{% endif %} python-semantic-release-10.4.1/config/release-templates/.components/changes.md.j2000066400000000000000000000116721506116242600277370ustar00rootroot00000000000000{% from 'macros.common.j2' import apply_alphabetical_ordering_by_brk_descriptions %}{% from 'macros.common.j2' import apply_alphabetical_ordering_by_descriptions %}{% from 'macros.common.j2' import apply_alphabetical_ordering_by_release_notices %}{% from 'macros.common.j2' import emoji_map, format_breaking_changes_description %}{% from 'macros.common.j2' import format_release_notice, section_heading_order %}{% from 'macros.common.j2' import section_heading_translations %}{% from 'macros.md.j2' import format_commit_summary_line %}{# EXAMPLE: ### ✨ Features - Add new feature ([#10](https://domain.com/namespace/repo/pull/10), [`abcdef0`](https://domain.com/namespace/repo/commit/HASH)) - **scope**: Add new feature ([`abcdef0`](https://domain.com/namespace/repo/commit/HASH)) ### 🪲 Bug Fixes - Fix bug ([#11](https://domain.com/namespace/repo/pull/11), [`abcdef1`](https://domain.com/namespace/repo/commit/HASH)) ### 💥 Breaking Changes - With the change _____, the change causes ___ effect. Ultimately, this section it is a more detailed description of the breaking change. With an optional scope prefix like the commit messages above. - **scope**: this breaking change has a scope to identify the part of the code that this breaking change applies to for better context. ### 💡 Additional Release Information - This is a release note that provides additional information about the release that is not a breaking change or a feature/bug fix. - **scope**: this release note has a scope to identify the part of the code that this release note applies to for better context. 
#}{% set max_line_width = max_line_width | default(100) %}{% set hanging_indent = hanging_indent | default(2) %}{# #}{% for type_ in section_heading_order if type_ in commit_objects %}{# PREPROCESS COMMITS (order by description & format description line) #}{% set ns = namespace(commits=commit_objects[type_]) %}{% set _ = apply_alphabetical_ordering_by_descriptions(ns) %}{# #}{% set commit_descriptions = [] %}{# #}{% for commit in ns.commits %}{# # Generate the commit summary line and format it for Markdown #}{% set description = "- %s" | format(format_commit_summary_line(commit)) %}{% set description = description | autofit_text_width(max_line_width, hanging_indent) %}{% set _ = commit_descriptions.append(description) %}{% endfor %}{# # # PRINT SECTION (header & commits) #}{{ "\n" }}{{ "### %s %s\n" | format(emoji_map[type_], type_ | title) }}{{ "\n" }}{{ "%s\n" | format(commit_descriptions | unique | join("\n\n")) }}{% endfor %}{# # # Determine if any commits have a breaking change or release notice # # commit_objects is a dictionary of strings to a list of commits { "features", [ParsedCommit(), ...] 
} #}{% set breaking_commits = [] %}{% set notice_commits = [] %}{% for commits in commit_objects.values() %}{% set valid_commits = commits | rejectattr("error", "defined") | list %}{# # Filter out breaking change commits that have no breaking descriptions #}{% set _ = breaking_commits.extend( valid_commits | selectattr("breaking_descriptions.0") ) %}{# # Filter out ParsedCommits commits that have no release notices #}{% set _ = notice_commits.extend( valid_commits | selectattr("release_notices.0") ) %}{% endfor %}{# #}{% if breaking_commits | length > 0 %}{# PREPROCESS COMMITS #}{% set brk_ns = namespace(commits=breaking_commits) %}{% set _ = apply_alphabetical_ordering_by_brk_descriptions(brk_ns) %}{# #}{% set brking_descriptions = [] %}{# #}{% for commit in brk_ns.commits %}{% set full_description = "- %s" | format( format_breaking_changes_description(commit).split("\n\n") | join("\n\n- ") ) %}{% set _ = brking_descriptions.append( full_description | autofit_text_width(max_line_width, hanging_indent) ) %}{% endfor %}{# # # PRINT BREAKING CHANGE DESCRIPTIONS (header & descriptions) #}{{ "\n" }}{{ "### %s Breaking Changes\n" | format(emoji_map["breaking"]) }}{{ "\n%s\n" | format(brking_descriptions | unique | join("\n\n")) }}{# #}{% endif %}{# #}{% if notice_commits | length > 0 %}{# PREPROCESS COMMITS #}{% set notice_ns = namespace(commits=notice_commits) %}{% set _ = apply_alphabetical_ordering_by_release_notices(notice_ns) %}{# #}{% set release_notices = [] %}{# #}{% for commit in notice_ns.commits %}{% set full_description = "- %s" | format( format_release_notice(commit).split("\n\n") | join("\n\n- ") ) %}{% set _ = release_notices.append( full_description | autofit_text_width(max_line_width, hanging_indent) ) %}{% endfor %}{# # # PRINT RELEASE NOTICE INFORMATION (header & descriptions) #}{{ "\n" }}{{ "### %s Additional Release Information\n" | format(emoji_map["release_note"]) }}{{ "\n%s\n" | format(release_notices | unique | join("\n\n")) }}{# #}{% endif %} 
python-semantic-release-10.4.1/config/release-templates/.components/changes.rst.j2000066400000000000000000000153751506116242600301530ustar00rootroot00000000000000{% from 'macros.common.j2' import apply_alphabetical_ordering_by_brk_descriptions %}{% from 'macros.common.j2' import apply_alphabetical_ordering_by_descriptions %}{% from 'macros.common.j2' import apply_alphabetical_ordering_by_release_notices %}{% from 'macros.common.j2' import emoji_map, format_breaking_changes_description %}{% from 'macros.common.j2' import format_release_notice, section_heading_order %}{% from 'macros.common.j2' import section_heading_translations %}{% from 'macros.rst.j2' import extract_issue_link_references, extract_pr_link_reference %}{% from 'macros.rst.j2' import format_commit_summary_line, format_link_reference %}{% from 'macros.rst.j2' import generate_heading_underline %}{# ✨ Features ----------- * Add new feature (`#10`_, `8a7b8ec`_) * **scope**: Add another feature (`abcdef0`_) 🪲 Bug Fixes ------------ * Fix bug (`#11`_, `8a7b8ec`_) 💥 Breaking Changes ------------------- * With the change _____, the change causes ___ effect. Ultimately, this section it is a more detailed description of the breaking change. With an optional scope prefix like the commit messages above. * **scope**: this breaking change has a scope to identify the part of the code that this breaking change applies to for better context. 💡 Additional Release Information --------------------------------- * This is a release note that provides additional information about the release that is not a breaking change or a feature/bug fix. * **scope**: this release note has a scope to identify the part of the code that this release note applies to for better context. .. _8a7B8ec: https://domain.com/owner/repo/commit/8a7b8ec .. _abcdef0: https://domain.com/owner/repo/commit/abcdef0 .. _PR#10: https://domain.com/namespace/repo/pull/10 .. 
_PR#11: https://domain.com/namespace/repo/pull/11 #}{% set max_line_width = max_line_width | default(100) %}{% set hanging_indent = hanging_indent | default(2) %}{# #}{% set post_paragraph_links = [] %}{# #}{% for type_ in section_heading_order if type_ in commit_objects %}{# # PREPARE SECTION HEADER #}{% set section_header = "%s %s" | format( emoji_map[type_], type_ | title ) %}{# # # PREPROCESS COMMITS #}{% set ns = namespace(commits=commit_objects[type_]) %}{% set _ = apply_alphabetical_ordering_by_descriptions(ns) %}{# #}{% set commit_descriptions = [] %}{# #}{% for commit in ns.commits %}{# # Extract PR/MR reference if it exists and store it for later #}{% set pr_link_reference = extract_pr_link_reference(commit) | default("", true) %}{% if pr_link_reference != "" %}{% set _ = post_paragraph_links.append(pr_link_reference) %}{% endif %}{# # # Extract Issue references if they exists and store it for later #}{% set issue_urls_ns = namespace(urls=[]) %}{% set _ = extract_issue_link_references(issue_urls_ns, commit) %}{% set _ = post_paragraph_links.extend(issue_urls_ns.urls) %}{# # # Always generate a commit hash reference link and store it for later #}{% set commit_hash_link_reference = format_link_reference( commit.hexsha | commit_hash_url, commit.short_hash ) %}{% set _ = post_paragraph_links.append(commit_hash_link_reference) %}{# # # Generate the commit summary line and format it for RST #}{% set description = "* %s" | format(format_commit_summary_line(commit)) %}{% set description = description | convert_md_to_rst %}{% set description = description | autofit_text_width(max_line_width, hanging_indent) %}{% set _ = commit_descriptions.append(description) %}{% endfor %}{# # # PRINT SECTION (Header & Commits) # Note: Must add an additional character to the section header when determining the underline because of # the emoji character which can serve as 2 characters in length. 
#}{{ "\n" }}{{ section_header ~ "\n" }}{{ generate_heading_underline(section_header ~ " ", '-') ~ "\n" }}{{ "\n%s\n" | format(commit_descriptions | unique | join("\n\n")) }}{% endfor %}{# # # Determine if any commits have a breaking change or release notice # # commit_objects is a dictionary of strings to a list of commits { "features", [ParsedCommit(), ...] } #}{% set breaking_commits = [] %}{% set notice_commits = [] %}{% for commits in commit_objects.values() %}{% set valid_commits = commits | rejectattr("error", "defined") | list %}{# # Filter out breaking change commits that have no breaking descriptions #}{% set _ = breaking_commits.extend( valid_commits | selectattr("breaking_descriptions.0") ) %}{# # Filter out ParsedCommits commits that have no release notices #}{% set _ = notice_commits.extend( valid_commits | selectattr("release_notices.0") ) %}{% endfor %}{# #}{% if breaking_commits | length > 0 %}{# # PREPROCESS COMMITS #}{% set brk_ns = namespace(commits=breaking_commits) %}{% set _ = apply_alphabetical_ordering_by_brk_descriptions(brk_ns) %}{# #}{% set brking_descriptions = [] %}{# #}{% for commit in brk_ns.commits %}{% set full_description = "* %s" | format( format_breaking_changes_description(commit).split("\n\n") | join("\n\n* ") ) %}{% set _ = brking_descriptions.append( full_description | convert_md_to_rst | autofit_text_width(max_line_width, hanging_indent) ) %}{% endfor %}{# # # PRINT BREAKING CHANGE DESCRIPTIONS (header & descriptions) #}{{ "\n" }}{{ "%s Breaking Changes\n" | format(emoji_map["breaking"]) }}{{ '-------------------\n' }}{{ "\n%s\n" | format(brking_descriptions | unique | join("\n\n")) }}{# #}{% endif %}{# #}{% if notice_commits | length > 0 %}{# PREPROCESS COMMITS #}{% set notice_ns = namespace(commits=notice_commits) %}{% set _ = apply_alphabetical_ordering_by_release_notices(notice_ns) %}{# #}{% set release_notices = [] %}{# #}{% for commit in notice_ns.commits %}{% set full_description = "* %s" | format( 
format_release_notice(commit).split("\n\n") | join("\n\n* ") ) %}{% set _ = release_notices.append( full_description | convert_md_to_rst | autofit_text_width(max_line_width, hanging_indent) ) %}{% endfor %}{# # # PRINT RELEASE NOTICE INFORMATION (header & descriptions) #}{{ "\n" }}{{ "%s Additional Release Information\n" | format(emoji_map["release_note"]) }}{{ "---------------------------------\n" }}{{ "\n%s\n" | format(release_notices | unique | join("\n\n")) }}{# #}{% endif %}{# # # # PRINT POST PARAGRAPH LINKS #}{% if post_paragraph_links | length > 0 %}{# # Print out any PR/MR or Issue URL references that were found in the commit messages #}{{ "\n%s\n" | format(post_paragraph_links | unique | sort | join("\n")) }}{% endif %} python-semantic-release-10.4.1/config/release-templates/.components/first_release.md.j2000066400000000000000000000006451506116242600311540ustar00rootroot00000000000000{# EXAMPLE: ## vX.X.X (YYYY-MMM-DD) _This release is published under the MIT License._ # Release Notes Only - Initial Release #}{{ "## %s (%s)\n" | format( release.version.as_semver_tag(), release.tagged_date.strftime("%Y-%m-%d") ) }}{% if license_name is defined and license_name %}{{ "\n_This release is published under the %s License._\n" | format(license_name) }}{% endif %} - Initial Release python-semantic-release-10.4.1/config/release-templates/.components/first_release.rst.j2000066400000000000000000000007161506116242600313630ustar00rootroot00000000000000{% from "macros.rst.j2" import generate_heading_underline %}{# .. _changelog-vX.X.X: vX.X.X (YYYY-MMM-DD) ==================== * Initial Release #}{% set version_header = "%s (%s)" | format( release.version.as_semver_tag(), release.tagged_date.strftime("%Y-%m-%d") ) %} {{- ".. 
_changelog-%s:" | format(release.version.as_semver_tag()) }} {{ version_header }} {{ generate_heading_underline(version_header, "=") }} * Initial Release python-semantic-release-10.4.1/config/release-templates/.components/macros.common.j2000066400000000000000000000123341506116242600304770ustar00rootroot00000000000000{# TODO: move to configuration for user to modify #} {% set section_heading_translations = { 'feat': 'features', 'fix': 'bug fixes', 'perf': 'performance improvements', 'docs': 'documentation', 'build': 'build system', 'refactor': 'refactoring', 'test': 'testing', 'ci': 'continuous integration', 'chore': 'chores', 'style': 'code style', } %} {% set section_heading_order = section_heading_translations.values() %} {% set emoji_map = { 'breaking': '💥', 'features': '✨', 'bug fixes': '🪲', 'performance improvements': 'âš¡', 'documentation': '📖', 'build system': 'âš™ï¸', 'refactoring': 'â™»ï¸', 'testing': '✅', 'continuous integration': '🤖', 'chores': '🧹', 'code style': '🎨', 'unknown': 'â—', 'release_note': '💡', } %} {# MACRO: Capitalize the first letter of a string only #}{% macro capitalize_first_letter_only(sentence) %}{{ (sentence[0] | upper) ~ sentence[1:] }}{% endmacro %} {# MACRO: format a commit descriptions list by: - Capitalizing the first line of the description - Adding an optional scope prefix - Joining the rest of the descriptions with a double newline #}{% macro format_attr_paragraphs(commit, attribute) %}{# NOTE: requires namespace because of the way Jinja2 handles variable scoping with loops #}{% set ns = namespace(full_description="") %}{# #}{% if commit.error is undefined %}{% for paragraph in commit | attr(attribute) %}{% if paragraph | trim | length > 0 %}{# #}{% set ns.full_description = [ ns.full_description, capitalize_first_letter_only(paragraph) | trim | safe, ] | join("\n\n") %}{# #}{% endif %}{% endfor %}{# #}{% set ns.full_description = ns.full_description | trim %}{# #}{% if commit.scope %}{% set ns.full_description = "**%s**: %s" 
| format( commit.scope, ns.full_description ) %}{% endif %}{% endif %}{# #}{{ ns.full_description }}{% endmacro %} {# MACRO: format the breaking changes description by: - Capitalizing the description - Adding an optional scope prefix #}{% macro format_breaking_changes_description(commit) %}{{ format_attr_paragraphs(commit, 'breaking_descriptions') }}{% endmacro %} {# MACRO: format the release notice by: - Capitalizing the description - Adding an optional scope prefix #}{% macro format_release_notice(commit) %}{{ format_attr_paragraphs(commit, "release_notices") }}{% endmacro %} {# MACRO: order commits alphabetically by scope and attribute - Commits are sorted based on scope and then the attribute alphabetically - Commits without scope are placed first and sorted alphabetically by the attribute - parameter: ns (namespace) object with a commits list - parameter: attr (string) attribute to sort by - returns None but modifies the ns.commits list in place #}{% macro order_commits_alphabetically_by_scope_and_attr(ns, attr) %}{% set ordered_commits = [] %}{# # # Eliminate any ParseError commits from input set #}{% set filtered_commits = ns.commits | rejectattr("error", "defined") | list %}{# # # grab all commits with no scope and sort alphabetically by attr #}{% for commit in filtered_commits | rejectattr("scope") | sort(attribute=attr) %}{% set _ = ordered_commits.append(commit) %}{% endfor %}{# # # grab all commits with a scope and sort alphabetically by the scope and then attr #}{% for commit in filtered_commits | selectattr("scope") | sort(attribute=(['scope', attr] | join(","))) %}{% set _ = ordered_commits.append(commit) %}{% endfor %}{# # # Return the ordered commits #}{% set ns.commits = ordered_commits %}{% endmacro %} {# MACRO: apply smart ordering of commits objects based on alphabetized summaries and then scopes - Commits are sorted based on the commit type and the commit message - Commits are grouped by the commit type - parameter: ns (namespace) object with 
a commits list - returns None but modifies the ns.commits list in place #}{% macro apply_alphabetical_ordering_by_descriptions(ns) %}{% set _ = order_commits_alphabetically_by_scope_and_attr(ns, 'descriptions.0') %}{% endmacro %} {# MACRO: apply smart ordering of commits objects based on alphabetized breaking changes and then scopes - Commits are sorted based on the commit type and the commit message - Commits are grouped by the commit type - parameter: ns (namespace) object with a commits list - returns None but modifies the ns.commits list in place #}{% macro apply_alphabetical_ordering_by_brk_descriptions(ns) %}{% set _ = order_commits_alphabetically_by_scope_and_attr(ns, 'breaking_descriptions.0') %}{% endmacro %} {# MACRO: apply smart ordering of commits objects based on alphabetized release notices and then scopes - Commits are sorted based on the commit type and the commit message - Commits are grouped by the commit type - parameter: ns (namespace) object with a commits list - returns None but modifies the ns.commits list in place #}{% macro apply_alphabetical_ordering_by_release_notices(ns) %}{% set _ = order_commits_alphabetically_by_scope_and_attr(ns, 'release_notices.0') %}{% endmacro %} python-semantic-release-10.4.1/config/release-templates/.components/macros.md.j2000066400000000000000000000041001506116242600275770ustar00rootroot00000000000000{% from 'macros.common.j2' import capitalize_first_letter_only %} {# MACRO: format a inline link reference in Markdown #}{% macro format_link(link, label) %}{{ "[%s](%s)" | format(label, link) }}{% endmacro %} {# MACRO: commit message links or PR/MR links of commit #}{% macro commit_msg_links(commit) %}{% if commit.error is undefined %}{# # # Initialize variables #}{% set link_references = [] %}{% set summary_line = capitalize_first_letter_only( commit.descriptions[0] | safe ) %}{# #}{% if commit.linked_merge_request != "" %}{# # Add PR references with a link to the PR #}{% set _ = link_references.append( 
format_link( commit.linked_merge_request | pull_request_url, "PR" ~ commit.linked_merge_request ) ) %}{% endif %}{# # # DEFAULT: Always include the commit hash as a link #}{% set _ = link_references.append( format_link( commit.hexsha | commit_hash_url, "`%s`" | format(commit.short_hash) ) ) %}{# #}{% set formatted_links = "" %}{% if link_references | length > 0 %}{% set formatted_links = " (%s)" | format(link_references | join(", ")) %}{% endif %}{# # Return the modified summary_line #}{{ summary_line ~ formatted_links }}{% endif %}{% endmacro %} {# MACRO: format commit summary line #}{% macro format_commit_summary_line(commit) %}{# # Check for Parsing Error #}{% if commit.error is undefined %}{# # # Add any message links to the commit summary line #}{% set summary_line = commit_msg_links(commit) %}{# #}{% if commit.scope %}{% set summary_line = "**%s**: %s" | format(commit.scope, summary_line) %}{% endif %}{# # # Return the modified summary_line #}{{ summary_line }}{# #}{% else %}{# # Return the first line of the commit if there was a Parsing Error #}{{ (commit.commit.message | string).split("\n", maxsplit=1)[0] }}{% endif %}{% endmacro %} python-semantic-release-10.4.1/config/release-templates/.components/macros.rst.j2000066400000000000000000000073461506116242600300260ustar00rootroot00000000000000{% from 'macros.common.j2' import capitalize_first_letter_only %} {# MACRO: format a post-paragraph link reference in RST #}{% macro format_link_reference(link, label) %}{{ ".. 
_%s: %s" | format(label, link) }}{% endmacro %} {# MACRO: generate a heading underline that matches the exact length of the header #} {% macro generate_heading_underline(header, underline_char) %}{% set header_underline = [] %}{% for _ in header %}{% set __ = header_underline.append(underline_char) %}{% endfor %}{# # Print out the header underline #}{{ header_underline | join }}{% endmacro %} {# MACRO: formats a commit message for a non-inline RST link for a commit hash and/or PR/MR #}{% macro commit_msg_links(commit) %}{% if commit.error is undefined %}{# # # Initialize variables #}{% set closes_statement = "" %}{% set link_references = [] %}{% set summary_line = capitalize_first_letter_only( commit.descriptions[0] | safe ) %}{# #}{% if commit.linked_issues | length > 0 %}{% set closes_statement = ", closes `%s`_" | format( commit.linked_issues | join("`_, `") ) %}{% endif %}{# #}{% if commit.linked_merge_request != "" %}{# # Add PR references with a link to the PR #}{% set _ = link_references.append("`PR%s`_" | format(commit.linked_merge_request)) %}{% endif %}{# # DEFAULT: Always include the commit hash as a link #}{% set _ = link_references.append("`%s`_" | format(commit.short_hash)) %}{# #}{% set formatted_links = "" %}{% if link_references | length > 0 %}{% set formatted_links = " (%s)" | format(link_references | join(", ")) %}{% endif %}{# # Return the modified summary_line #}{{ summary_line ~ closes_statement ~ formatted_links }}{% endif %}{% endmacro %} {# MACRO: format commit summary line #}{% macro format_commit_summary_line(commit) %}{# # Check for Parsing Error #}{% if commit.error is undefined %}{# # # Add any message links to the commit summary line #}{% set summary_line = commit_msg_links(commit) %}{# #}{% if commit.scope %}{% set summary_line = "**%s**: %s" | format(commit.scope, summary_line) %}{% endif %}{# # # Return the modified summary_line #}{{ summary_line }}{# #}{% else %}{# # Return the first line of the commit if there was a Parsing Error 
#}{{ (commit.commit.message | string).split("\n", maxsplit=1)[0] }}{% endif %}{% endmacro %} {# MACRO: Extract issue references from a parsed commit object - Stores the issue urls in the namespace object #}{% macro extract_issue_link_references(ns, commit) %}{% set issue_urls = [] %}{# #}{% if commit.linked_issues is defined and commit.linked_issues | length > 0 %}{% for issue_num in commit.linked_issues %}{# # Create an issue reference url #}{% set _ = issue_urls.append( format_link_reference( issue_num | issue_url, issue_num, ) ) %}{% endfor %}{% endif %}{# # # Store the issue urls in the namespace object #}{% set ns.urls = issue_urls %}{% endmacro %} {# MACRO: Create & return an non-inline RST link from a commit message - Returns empty string if no PR/MR identifier is found #}{% macro extract_pr_link_reference(commit) %}{% if commit.error is undefined %}{% set summary_line = commit.descriptions[0] %}{# #}{% if commit.linked_merge_request != "" %}{# # Create a PR/MR reference url #}{{ format_link_reference( commit.linked_merge_request | pull_request_url, "PR" ~ commit.linked_merge_request, ) }}{% endif %}{% endif %}{% endmacro %} python-semantic-release-10.4.1/config/release-templates/.components/unreleased_changes.rst.j2000066400000000000000000000003121506116242600323430ustar00rootroot00000000000000{% if unreleased_commits | length > 0 %} .. 
_changelog-unreleased: Unreleased ========== {% set commit_objects = unreleased_commits %}{% include "changes.rst.j2" -%}{{ "\n" }}{% endif %} python-semantic-release-10.4.1/config/release-templates/.components/versioned_changes.md.j2000066400000000000000000000007531506116242600320130ustar00rootroot00000000000000{# EXAMPLE: ## vX.X.X (YYYY-MMM-DD) _This release is published under the MIT License._ # Release Notes Only {{ change_sections }} #}{{ "## %s (%s)\n" | format( release.version.as_semver_tag(), release.tagged_date.strftime("%Y-%m-%d") ) }}{% if license_name is defined and license_name %}{{ "\n_This release is published under the %s License._\n" | format(license_name) }}{% endif %}{# #}{% set commit_objects = release["elements"] %}{% include "changes.md.j2" -%} python-semantic-release-10.4.1/config/release-templates/.components/versioned_changes.rst.j2000066400000000000000000000010311506116242600322110ustar00rootroot00000000000000{% from 'macros.rst.j2' import generate_heading_underline %}{# .. _changelog-X.X.X: vX.X.X (YYYY-MMM-DD) ==================== {{ change_sections }} #}{% set version_header = "%s (%s)" | format( release.version.as_semver_tag(), release.tagged_date.strftime("%Y-%m-%d") ) %}{# #}{{ ".. 
_changelog-%s:" | format(release.version.as_semver_tag()) }} {{ version_header }} {{ generate_heading_underline(version_header, "=") }} {# #}{% set commit_objects = release["elements"] %}{% include "changes.rst.j2" -%} python-semantic-release-10.4.1/config/release-templates/.release_notes.md.j2000066400000000000000000000100531506116242600267620ustar00rootroot00000000000000{% from ".components/macros.md.j2" import format_link %}{# EXAMPLE: ## v1.0.0 (2020-01-01) _This release is published under the MIT License._ ### ✨ Features - Add new feature ([PR#10](https://domain.com/namespace/repo/pull/10), [`abcdef0`](https://domain.com/namespace/repo/commit/HASH)) - **scope**: Add new feature ([`abcdef0`](https://domain.com/namespace/repo/commit/HASH)) ### 🪲 Bug Fixes - Fix bug ([PR#11](https://domain.com/namespace/repo/pull/11), [`abcdef1`](https://domain.com/namespace/repo/commit/HASH)) ### 💥 Breaking Changes - With the change _____, the change causes ___ effect. Ultimately, this section it is a more detailed description of the breaking change. With an optional scope prefix like the commit messages above. - **scope**: this breaking change has a scope to identify the part of the code that this breaking change applies to for better context. ### 💡 Additional Release Information - This is a release note that provides additional information about the release that is not a breaking change or a feature/bug fix. - **scope**: this release note has a scope to identify the part of the code that this release note applies to for better context. 
### ✅ Resolved Issues - [#000](https://domain.com/namespace/repo/issues/000): _Title_ --- **Detailed Changes**: [vX.X.X...vX.X.X](https://domain.com/namespace/repo/compare/vX.X.X...vX.X.X) --- **Installable artifacts are available from**: - [PyPi Registry](https://pypi.org/project/package_name/x.x.x) - [GitHub Release Assets](https://github.com/namespace/repo/releases/tag/vX.X.X) #}{# # Set line width to 1000 to avoid wrapping as GitHub will handle it #}{% set max_line_width = max_line_width | default(1000) %}{% set hanging_indent = hanging_indent | default(2) %}{% set license_name = license_name | default("", True) %}{% set releases = context.history.released.values() | list %}{% set curr_release_index = releases.index(release) %}{# #}{% if mask_initial_release and curr_release_index == releases | length - 1 %}{# # On a first release, generate our special message #}{% include ".components/first_release.md.j2" %}{% else %}{# # Not the first release so generate notes normally #}{% include ".components/versioned_changes.md.j2" -%}{# # # If there are any commits that resolve issues, list out the issues with links #}{% set issue_resolving_commits = [] %}{% for commits in release["elements"].values() %}{% set _ = issue_resolving_commits.extend( commits | rejectattr("error", "defined") | selectattr("linked_issues") ) %}{% endfor %}{% if issue_resolving_commits | length > 0 %}{{ "\n### ✅ Resolved Issues\n" }}{# #}{% set issue_numbers = [] %}{% for linked_issues in issue_resolving_commits | map(attribute="linked_issues") %}{% set _ = issue_numbers.extend(linked_issues) %}{% endfor %}{% for issue_num in issue_numbers | unique | sort_numerically %}{{ "\n- %s: _Title_\n" | format(format_link(issue_num | issue_url, issue_num)) }}{# #}{% endfor %}{% endif %}{# #}{% set prev_release_index = curr_release_index + 1 %}{# #}{% if 'compare_url' is filter and prev_release_index < releases | length %}{% set prev_version_tag = releases[prev_release_index].version.as_tag() %}{% set 
new_version_tag = release.version.as_tag() %}{% set version_compare_url = prev_version_tag | compare_url(new_version_tag) %}{% set detailed_changes_link = '[{}...{}]({})'.format( prev_version_tag, new_version_tag, version_compare_url ) %}{{ "\n" }}{{ "---\n" }}{{ "\n" }}{{ "**Detailed Changes**: %s" | format(detailed_changes_link) }}{{ "\n" }}{% endif %}{% endif %}{# #} --- **Installable artifacts are available from**: {{ "- %s" | format( format_link( repo_name | create_pypi_url(release.version | string), "PyPi Registry", ) ) }} {{ "- %s" | format( format_link( release.version.as_tag() | create_release_url, "{vcs_name} Release Assets" | format_w_official_vcs_name, ) ) }} python-semantic-release-10.4.1/config/release-templates/CHANGELOG.rst.j2000066400000000000000000000013501506116242600255530ustar00rootroot00000000000000{# This changelog template controls which changelog creation occurs based on which mode is provided. Modes: - init: Initialize a full changelog from scratch - update: Insert new version details where the placeholder exists in the current changelog #}{% set this_file = "CHANGELOG.rst" %}{% set insertion_flag = ctx.changelog_insertion_flag %}{% set unreleased_commits = ctx.history.unreleased %}{% set releases = ctx.history.released.values() | list %}{# #}{% if ctx.changelog_mode == "init" %}{% include ".components/changelog_init.rst.j2" %}{# #}{% elif ctx.changelog_mode == "update" %}{% set prev_changelog_file = this_file %}{% include ".components/changelog_update.rst.j2" %}{# #}{% endif %} python-semantic-release-10.4.1/docs/000077500000000000000000000000001506116242600172705ustar00rootroot00000000000000python-semantic-release-10.4.1/docs/Makefile000066400000000000000000000155101506116242600207320ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. 
SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make 
Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/python-semantic-release.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/python-semantic-release.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/python-semantic-release" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/python-semantic-release" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 
latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." 
doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." coverage: $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage livehtml: sphinx-autobuild "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) python-semantic-release-10.4.1/docs/api/000077500000000000000000000000001506116242600200415ustar00rootroot00000000000000python-semantic-release-10.4.1/docs/api/commands.rst000066400000000000000000000427321506116242600224040ustar00rootroot00000000000000.. _commands: Command Line Interface (CLI) ============================ All commands accept a ``-h/--help`` option, which displays the help text for the command and exits immediately. ``semantic-release`` does not allow interspersed arguments and options, which means that the options for ``semantic-release`` are not necessarily accepted one of the subcommands. In particular, the :ref:`cmd-main-option-noop` and :ref:`cmd-main-option-verbosity` flags must be given to the top-level ``semantic-release`` command, before the name of the subcommand. For example: Incorrect:: semantic-release version --print --noop -vv Correct:: semantic-release -vv --noop version --print With the exception of :ref:`cmd-main` and :ref:`cmd-generate-config`, all commands require that you have set up your project's configuration. To help with setting up your project configuration, :ref:`cmd-generate-config` will print out the default configuration to the console, which you can then modify it to match your project & environment. .. _cmd-main: ``semantic-release`` ~~~~~~~~~~~~~~~~~~~~ .. _cmd-main-options: Options: -------- .. 
_cmd-main-option-version: ``--version`` ************** Display the version of Python Semantic Release and exit .. _cmd-main-option-noop: ``--noop`` ********** Use this flag to see what ``semantic-release`` intends to do without making changes to your project. When using this option, ``semantic-release`` can be run as many times as you wish without any side-effects. .. _cmd-main-option-verbosity: ``-v/--verbose`` ****************** Can be supplied more than once. Controls the verbosity of ``semantic-releases`` logging output (default level is ``WARNING``, use ``-v`` for ``INFO`` and ``-vv`` for ``DEBUG``). .. _cmd-main-option-config: ``-c/--config [FILE]`` ********************** Specify the configuration file which Python Semantic Release should use. This can be any of the supported formats valid for :ref:`cmd-generate-config-option-format` **Default:** pyproject.toml .. seealso:: - :ref:`configuration` .. _cmd-main-option-strict: ``--strict`` ************ Enable Strict Mode. This will cause a number of conditions to produce a non-zero exit code when passed, where they would otherwise have produced an exit code of 0. Enabling this allows, for example, certain conditions to cause failure of a CI pipeline, while omitting this flag would allow the pipeline to continue to run. .. seealso:: - :ref:`strict-mode` .. _cmd-version: ``semantic-release version`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Detect the semantically correct next version that should be applied to your project and release it. By default (in order): #. Write this new version to the project metadata locations specified in the configuration file #. Update the changelog file with the new version and any changes introduced since the last release, using the configured changelog template #. Build the project using :ref:`config-build_command`, if specified #. Create a new commit with these locations and any other assets configured to be included in a release #. 
Tag this commit according the configured format, with a tag that uniquely identifies the version being released #. Push the new tag and commit to the remote for the repository #. Create a release in the remote VCS for this tag (if supported) All of these steps can be toggled on or off using the command line options described below. Some of the steps rely on others, so some options may implicitly disable others. Changelog generation is done identically to the way it is done in :ref:`cmd-changelog`, but this command additionally ensures the updated changelog is included in the release commit that is made. **Common Variations** .. code-block:: bash # Print the next version that will be applied semantic-release version --print # Print the next version that will be applied, including the tag prefix semantic-release version --print-tag # Print the last released version semantic-release version --print-last-released # Print the last released version, including the tag prefix semantic-release version --print-last-released-tag # Only stamp the next version in the project metadata locations semantic-release version --no-changelog --skip-build --no-commit --no-tag # Stamp the version, update the changelog, and run the build command, then stop semantic-release version --no-commit --no-tag # Make all local changes but do not publish them to the remote (changelog, build, commit & tag) semantic-release version --no-push # Don't ever create a changelog (but do everything else) semantic-release version --no-changelog # Don't create a release in the remote VCS (but do publish the commit and tag) semantic-release version --no-vcs-release # Do everything semantic-release version .. seealso:: - :ref:`Ultraviolet (uv) integration ` - :ref:`cmd-changelog` - :ref:`changelog-templates` - :ref:`config-tag_format` - :ref:`config-assets` - :ref:`config-version_toml` - :ref:`config-version_variables` .. _cmd-version-options: Options: -------- .. 
_cmd-version-option-print: ``--print`` *********** Print the next version that will be applied, respecting the other command line options that are supplied, and exit. This flag is useful if you just want to see what the next version will be. Note that instead of printing nothing at all, if no release will be made, the current version is printed. For example, you can experiment with which versions would be applied using the other command line options:: semantic-release version --print semantic-release version --patch --print semantic-release version --prerelease --print .. _cmd-version-option-print-tag: ``--print-tag`` *************** Same as the :ref:`cmd-version-option-print` flag but prints the complete tag name (ex. ``v1.0.0`` or ``py-v1.0.0``) instead of the raw version number (``1.0.0``). .. _cmd-version-option-print-last-released: ``--print-last-released`` ************************* Print the last released version based on the Git tags. This flag is useful if you just want to see the released version without determining what the next version will be. Note if the version can not be found nothing will be printed. .. _cmd-version-option-print-last-released-tag: ``--print-last-released-tag`` ***************************** Same as the :ref:`cmd-version-option-print-last-released` flag but prints the complete tag name (ex. ``v1.0.0`` or ``py-v1.0.0``) instead of the raw version number (``1.0.0``). .. _cmd-version-option-force-level: ``--major/--minor/--patch/--prerelease`` **************************************** Force the next version to increment the major, minor or patch digits, or the prerelease revision, respectively. These flags are optional but mutually exclusive, so only one may be supplied, or none at all. Using these flags overrides the usual calculation for the next version; this can be useful, say, when a project wants to release its initial 1.0.0 version. .. 
warning:: Using these flags will override the configured value of ``prerelease`` (configured in your :ref:`Release Group`), **regardless of your configuration or the current version**. To produce a prerelease with the appropriate digit incremented you should also supply the :ref:`cmd-version-option-as-prerelease` flag. If you do not, using these flags will force a full (non-prerelease) version to be created. For example, suppose your project's current version is ``0.2.1-rc.1``. The following shows how these options can be combined with ``--as-prerelease`` to force different versions: .. code-block:: bash semantic-release version --prerelease --print # 0.2.1-rc.2 semantic-release version --patch --print # 0.2.2 semantic-release version --minor --print # 0.3.0 semantic-release version --major --print # 1.0.0 semantic-release version --minor --as-prerelease --print # 0.3.0-rc.1 semantic-release version --prerelease --as-prerelease --print # 0.2.1-rc.2 These options are forceful overrides, but there is no action required for subsequent releases performed using the usual calculation algorithm. Supplying ``--prerelease`` will cause Python Semantic Release to scan your project history for any previous prereleases with the same major, minor and patch versions as the latest version and the same :ref:`prerelease token` as the one passed by command-line or configuration. If one is not found, ``--prerelease`` will produce the next version according to the following format: .. code-block:: python f"{latest_version.major}.{latest_version.minor}.{latest_version.patch}-{prerelease_token}.1" However, if Python Semantic Release identifies a previous *prerelease* version with the same major, minor and patch digits as the latest version, *and* the same prerelease token as the one supplied by command-line or configuration, then Python Semantic Release will increment the revision found on that previous prerelease version in its new version. 
For example, if ``"0.2.1-rc.1"`` and already exists as a previous version, and the latest version is ``"0.2.1"``, invoking the following command will produce ``"0.2.1-rc.2"``: .. code-block:: bash semantic-release version --prerelease --prerelease-token "rc" --print .. warning:: This is true irrespective of the branch from which ``"0.2.1-rc.1"`` was released from. The check for previous prereleases "leading up to" this normal version is intended to help prevent collisions in git tags to an extent, but isn't foolproof. As the example shows it is possible to release a prerelease for a normal version that's already been released when using this flag, which would in turn be ignored by tools selecting versions by `SemVer precedence rules`_. .. _SemVer precedence rules: https://semver.org/#spec-item-11 .. seealso:: - :ref:`configuration` - :ref:`config-branches` .. _cmd-version-option-as-prerelease: ``--as-prerelease`` ******************* After performing the normal calculation of the next version, convert the resulting next version to a prerelease before applying it. As with :ref:`cmd-version-option-force-level`, this option is a forceful override, but no action is required to resume calculating versions as normal on the subsequent releases. The main distinction between ``--prerelease`` and ``--as-prerelease`` is that the latter will not *force* a new version if one would not have been released without supplying the flag. This can be useful when making a single prerelease on a branch that would typically release normal versions. If not specified in :ref:`cmd-version-option-prerelease-token`, the prerelease token is identified using the :ref:`Multibranch Release Configuration ` See the examples alongside :ref:`cmd-version-option-force-level` for how to use this flag. .. _cmd-version-option-prerelease-token: ``--prerelease-token [VALUE]`` ****************************** Force the next version to use the value as the prerelease token. 
This overrides the configured value if one is present. If not used during a release producing a prerelease version, this option has no effect. .. _cmd-version-option-build-metadata: ``--build-metadata [VALUE]`` **************************** If given, append the value to the newly calculated version. This can be used, for example, to attach a run number from a CI service or a date to the version and tag that are created. This value can also be set using the environment variable ``PSR_BUILD_METADATA`` For example, assuming a project is currently at version 1.2.3:: $ semantic-release version --minor --print 1.3.0 $ semantic-release version --minor --print --build-metadata "run.12345" 1.3.0+run.12345 .. _cmd-version-option-commit: ``--commit/--no-commit`` ************************ Whether or not to perform a ``git commit`` on modifications to source files made by ``semantic-release`` during this command invocation, and to run ``git tag`` on this new commit with a tag corresponding to the new version. If ``--no-commit`` is supplied, it may disable other options derivatively; please see below. **Default:** ``--commit`` .. seealso:: - :ref:`tag_format ` .. _cmd-version-option-tag: ``--tag/--no-tag`` ************************ Whether or not to perform a ``git tag`` to apply a tag of the corresponding to the new version during this command invocation. This option manages the tag application separate from the commit handled by the ``--commit`` option. If ``--no-tag`` is supplied, it may disable other options derivatively; please see below. **Default:** ``--tag`` .. _cmd-version-option-changelog: ``--changelog/--no-changelog`` ****************************** Whether or not to update the changelog file with changes introduced as part of the new version released. **Default:** ``--changelog`` .. seealso:: - :ref:`config-changelog` - :ref:`changelog-templates` .. 
_cmd-version-option-push: ``--push/--no-push`` ******************** Whether or not to push new commits and/or tags to the remote repository. **Default:** ``--no-push`` if :ref:`--no-commit ` and :ref:`--no-tag ` is also supplied, otherwise ``push`` is the default. .. _cmd-version-option-vcs-release: ``--vcs-release/--no-vcs-release`` ********************************** Whether or not to create a "release" in the remote VCS service, if supported. If releases aren't supported in a remote VCS, this option will not cause a command failure, but will produce a warning. **Default:** ``--no-vcs-release`` if ``--no-push`` is supplied (including where this is implied by supplying only ``--no-commit``), otherwise ``--vcs-release`` .. _cmd-version-option-skip_build: ``--skip-build`` **************** If passed, skip execution of the :ref:`build_command ` after version stamping and changelog generation. .. _cmd-publish: ``semantic-release publish`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Publish a distribution to a VCS release. Uploads using :ref:`config-publish` .. seealso:: - :ref:`config-publish` - :ref:`config-build_command` .. _cmd-publish-options: Options: -------- .. _cmd-publish-option-tag: ``--tag`` ********* The tag associated with the release to publish to. If not given or set to "latest", then Python Semantic Release will examine the Git tags in your repository to identify the latest version, and attempt to publish to a Release corresponding to this version. **Default:** "latest" .. _cmd-generate-config: ``semantic-release generate-config`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Generate default configuration for semantic-release, to help you get started quickly. You can inspect the defaults, write to a file and then edit according to your needs. 
For example, to append the default configuration to your pyproject.toml file, you can use the following command:: $ semantic-release generate-config -f toml --pyproject >> pyproject.toml If your project doesn't already leverage TOML files for configuration, it might better suit your project to use JSON instead:: $ semantic-release generate-config -f json If you would like to add JSON configuration to a shared file, e.g. ``package.json``, you can then simply add the output from this command as a **top-level** key to the file. **Note:** Because there is no "null" or "nil" concept in TOML (see the relevant `GitHub issue`_), configuration settings which are ``None`` by default are omitted from the default configuration. .. _`GitHub issue`: https://github.com/toml-lang/toml/issues/30 .. seealso:: - :ref:`configuration` .. _cmd-generate-config-options: Options: -------- .. _cmd-generate-config-option-format: ``-f/--format [FORMAT]`` ************************ The format that the default configuration should be generated in. Valid choices are ``toml`` and ``json`` (case-insensitive). **Default:** toml .. _cmd-generate-config-option-pyproject: ``--pyproject`` *************** If used alongside ``--format json``, this option has no effect. When using ``--format=toml``, if specified the configuration will sit under a top-level key of ``tool.semantic_release`` to comply with `PEP 518`_; otherwise, the configuration will sit under a top-level key of ``semantic_release``. .. _PEP 518: https://peps.python.org/pep-0518/#tool-table .. _cmd-changelog: ``semantic-release changelog`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Generate and optionally publish a changelog for your project. The changelog is generated based on a template which can be customized. Python Semantic Release uses Jinja_ as its templating engine; as a result templates need to be written according to the `Template Designer Documentation`_. .. _Jinja: https://jinja.palletsprojects.com/ .. 
_`Template Designer Documentation`: https://jinja.palletsprojects.com/en/3.1.x/templates/ .. seealso:: - :ref:`config-changelog` - :ref:`config-changelog-environment` - :ref:`changelog-templates` Options: -------- .. _cmd-changelog-option-post-to-release-tag: ``--post-to-release-tag [TAG]`` ******************************* If supplied, attempt to find a release in the remote VCS corresponding to the Git tag ``TAG``, and post the generated changelog to that release. If the tag exists but no corresponding release is found in the remote VCS, then Python Semantic Release will attempt to create one. If using this option, the relevant authentication token *must* be supplied via the relevant environment variable. python-semantic-release-10.4.1/docs/concepts/000077500000000000000000000000001506116242600211065ustar00rootroot00000000000000python-semantic-release-10.4.1/docs/concepts/changelog_templates.rst000066400000000000000000001324451506116242600256560ustar00rootroot00000000000000.. _changelog-templates: Version Change Reports ====================== When using the :ref:`cmd-version` and :ref:`cmd-changelog` commands, Python Semantic Release (PSR) will generate a changelog and release notes for your project automatically in the default configuration. The changelog is rendered using the `Jinja`_ template engine, and in the default configuration, PSR will use a built-in template file to render the changelog at the file location defined by the :ref:`changelog_file ` setting. Through the use of the templating engine & the :ref:`template_dir ` configuration setting, you can customize the appearance of your changelog and release notes content. You may also generate a set of files using your custom template directory and the templates will be rendered relative to the root of your repository. 
Because PSR uses a third-party library, `Jinja`_, as its template engine, we do not include all the syntax within our documentation but rather you should refer to the `Template Designer Documentation`_ for guidance on how to customize the appearance of your release files. If you would like to customize the template environment itself, then certain options are available to you via :ref:`changelog environment configuration `. If you do not want to use the changelog generation features, you can disable changelog generation entirely during the :ref:`cmd-version` command by providing the :ref:`--no-changelog ` command-line option. .. _Jinja: https://jinja.palletsprojects.com/en/3.1.x/ .. _Template Designer Documentation: https://jinja.palletsprojects.com/en/3.1.x/templates/ .. _changelog-templates-default_changelog: Using the Default Changelog --------------------------- If you don't provide any custom templates in the :ref:`changelog.template_dir `, the default changelog templates will be used to render the changelog. PSR provides two default changelog output formats: 1. Markdown (``.md``), *default* 2. reStructuredText (``.rst``), *available since v9.11.0* Both formats are kept in sync with one another to display the equivalent information in the respective format. The default changelog template is located in the ``data/templates/`` directory within the PSR package. The templates are written in modular style (ie. multiple files) and during the render process are ultimately combined together to render the final changelog output. The rendering start point is the ``CHANGELOG.{FORMAT_EXT}.j2`` underneath the respective format directory. PSR provides a few configuration options to customize the default changelog output and can be found under the :ref:`changelog.default_templates ` section as well as some common configuration options under the :ref:`config-changelog` section. 
To toggle the output format, you only need to set the :ref:`changelog.default_templates.changelog_file ` file name to include the desired file extension (``.md`` or ``.rst``). If you would like a different extension for the resulting changelog file, but would like to still have control over the template format, you can set the :ref:`changelog.default_templates.output_format ` configuration setting to the desired format. A common and *highly-recommended* configuration option is the :ref:`changelog.exclude_commit_patterns ` setting which allows the user to define regular expressions that will exclude commits from the changelog output. This is useful to filter out change messages that are not relevant to your external consumers (ex. ``ci`` and ``test`` in the conventional commit standard) and only include the important changes that impact the consumer of your software. Another important configuration option is the :ref:`changelog.mode ` setting which determines the behavior of the changelog generation. There are 2 modes that available that described in detail below. 1. :ref:`changelog-templates-default_changelog-init` when ``mode = "init"``. 2. :ref:`changelog-templates-default_changelog-update` when ``mode = "update"``. .. _changelog-templates-default_changelog-init: Initialization Mode ^^^^^^^^^^^^^^^^^^^ When using the initialization mode, the changelog file will be created from scratch using the entire git history and **overwrite** any existing changelog file. This is the default behavior introduced in ``v8.0.0``. This is useful when you are trying to convert over to Python Semantic Release for the first time or when you want to automatically update the entire format of your changelog file. .. warning:: If you have an existing changelog in the location you have configured with the :ref:`changelog.changelog_file ` setting, PSR will overwrite the contents of this file on each release. 
Please make sure to refer to :ref:`changelog-templates-migrating-existing-changelog`. .. _changelog-templates-default_changelog-update: Update Mode ^^^^^^^^^^^^ .. note:: Introduced in ``v9.10.0``. When using the update mode, only the change information from the last release will be prepended into the existing changelog file (defined by the :ref:`changelog.changelog_file `). This mimics the behavior that was used in versions prior to ``v8.0.0`` before the conversion to a templating engine but now uses the `Jinja`_ to accomplish the update. This mode is best suited for managing changes over the lifetime of your project when you may have a need to make manual changes or adjustments to the changelog and its not easily recreated with a template. **How It Works** In order to insert the new release information into an existing changelog file, your changelog file must have an insertion flag to indicate where the new release information should be inserted. The default template will read in your existing changelog file, split the content based on the insertion flag, and then recombine the content (including the insertion flag) with the new release information added after the insertion flag. The insertion flag is customizable through the :ref:`changelog.insertion_flag ` setting. Generally, your insertion flag should be unique text to your changelog file to avoid any unexpected behavior. See the examples below. In the case where the insertion flag is **NOT** found in the existing changelog file, the changelog file will be re-written without any changes. If there is no existing changelog file found, then the changelog file will be initialized from scratch as if the mode was set to ``init``, except the :ref:`changelog.insertion_flag ` will be included into the newly created changelog file. .. tip:: We have accomplished changelog updating through the use of the `Jinja`_ templating and additional context filters and context variables. 
This is notable because in the case that you want to customize your changelog template, you now can use the same logic to enable changelog updates of your custom template! .. seealso:: - :ref:`changelog-templates-migrating-existing-changelog`. **Example** Given your existing changelog looks like the following with a :ref:`changelog.insertion_flag ` set to ````, when you run the :ref:`cmd-version` command, the new release information will be inserted after the insertion flag. **Before** .. code:: markdown # CHANGELOG ## 1.0.0 - Initial Release **After** .. code:: markdown # CHANGELOG ## v1.1.0 ### Feature - feat: added a new feature ### Fix - fix: resolved divide by zero error ## 1.0.0 - Initial Release .. _changelog-templates-default_changelog-examples: Configuration Examples ^^^^^^^^^^^^^^^^^^^^^^ 1. Goal: Configure an updating reStructuredText changelog with a custom insertion flag within ``pyproject.toml``. .. code:: toml [tool.semantic_release.changelog] mode = "update" insertion_flag = "..\n All versions below are listed in reverse chronological order" [tool.semantic_release.changelog.default_templates] changelog_file = "CHANGELOG.rst" output_format = "rst" # optional because of the file extension 2. Goal: Configure an updating Markdown changelog with custom file name and default insertion flag within a separate config file ``releaserc.json``. .. code:: json { "semantic_release": { "changelog": { "mode": "update", "default_templates": { "changelog_file": "docs/HISTORY", "output_format": "md" } } } } 3. Goal: Configure an initializing reStructuredText changelog with filtered conventional commits patterns and merge commits within a custom config file ``releaserc.toml``. .. 
code:: toml [semantic_release.changelog] mode = "init" default_templates = { changelog_file = "docs/CHANGELOG.rst" } exclude_commit_patterns = [ '''chore(?:\([^)]*?\))?: .+''', '''ci(?:\([^)]*?\))?: .+''', '''refactor(?:\([^)]*?\))?: .+''', '''style(?:\([^)]*?\))?: .+''', '''test(?:\([^)]*?\))?: .+''', '''build\((?!deps\): .+)''', '''Merged? .*''', ] If identified or supported by the parser, the default changelog templates will include a separate section of breaking changes and additional release information. Refer to the :ref:`commit parsing ` section to see how to write commit messages that will be properly parsed and displayed in these sections. .. _changelog-templates-default_release_notes: Using the Default Release Notes ------------------------------- PSR has the capability to generate release notes as part of the publishing of a new version similar to the changelog. The release notes are generated using a `Jinja`_ template and posted to the your remote version control server (VCS) such as GitHub, GitLab, etc during the :ref:`cmd-version` command. PSR provides a default built-in template out-of-the-box for generating release notes. The difference between the changelog and release notes is that the release notes only contain the changes for the current release. Due to the modularity of the PSR templates, the format is similar to an individual version of the default changelog but may include other version specific information. At this time, the default template for version release notes is only available in Markdown format for all VCS types. If you want to review what the default release notes look like you can use the following command to print the release notes to the console (remove any configuration for defining a custom template directory): .. 
code:: console # Create a current tag git tag v1.0.0 semantic-release --noop changelog --post-to-release-tag v1.0.0 The default template provided by PSR will respect the :ref:`config-changelog-default_templates-mask_initial_release` setting and will also add a comparison link to the previous release if one exists without customization. As of ``v9.18.0``, the default release notes will also include a statement to declare which license the project was released under. PSR determines which license to declare based on the value of ``project.license-expression`` in the ``pyproject.toml`` file as defined in the `PEP 639`_ specification. .. seealso:: - To personalize your release notes, see the :ref:`changelog-templates-custom_release_notes` section. .. _PEP 639: https://peps.python.org/pep-0639/ .. _changelog-templates-template-rendering: Custom Changelogs ----------------- If you would like to customize the appearance of your changelog, you can create your own custom templates and configure PSR to render your templates instead during the :ref:`cmd-version` and :ref:`cmd-changelog` commands. To use a custom template, you need to create a directory within your repository and set the :ref:`template_dir ` setting to the name of this directory. The default name is ``"templates"``. Templates are identified by giving a ``.j2`` extension to the template file. Any such templates have the ``.j2`` extension removed from the target file. Therefore, to render an output file ``foo.csv``, you should create a template called ``foo.csv.j2`` within your template directory. If you have additional files that you would like to render alongside your changelog, you can place these files within the template directory. A file within your template directory which does *not* end in ``.j2`` will not be treated as a template; it will be copied to its target location without being rendered by the template engine. .. tip:: Hidden files within the template directory (i.e. 
filenames that begin with a period ``"."``) are *excluded* from the rendering process. Hidden folders within the template directory are also excluded, *along with all files and folders contained within them*. This is useful for defining macros or other template components that should not be rendered individually. .. tip:: When initially starting out at customizing your own changelog templates, you should reference the default template embedded within PSR. The template directory is located at ``data/templates/`` within the PSR package. Within our templates directory we separate out each type of commit parser (e.g. conventional) and the content format type (e.g. markdown). You can copy this directory to your repository's templates directory and then customize the templates to your liking. .. _changelog-templates-template-rendering-directory-structure: Directory Structure ^^^^^^^^^^^^^^^^^^^ When the templates are rendered, files within the templates directory tree are output to the location within your repository that has the *same relative path* to the root of your project as the *relative path of the template within the templates directory*. **Example** An example project has the following structure: .. code-block:: example-project/ ├── src/ │   └── example_project/ │   └── __init__.py └── ch-templates/ ├── CHANGELOG.md.j2 ├── .components/ │   └── authors.md.j2 ├── .macros.j2 ├── src/ │   └── example_project/ │   └── data/ │   └── data.json.j2 └── static/ └── config.cfg And a custom templates folder configured via the following snippet in ``pyproject.toml``: .. code-block:: toml [tool.semantic_release.changelog] template_dir = "ch-templates" After running a release with Python Semantic Release, the directory structure of the project will now look like this (excluding the template directory): .. 
code-block:: example-project/ ├── CHANGELOG.md ├── src/ │   └── example_project/ │   ├── data/ │   │   └── data.json │   └── __init__.py └── static/ └── config.cfg Importantly, note the following: * There is no top-level ``.macros`` file created, because hidden files are excluded from the rendering process. * There is no top-level ``.components`` directory created, because hidden folders and all files and folders contained within it are excluded from the rendering process. * The ``.components/authors.md.j2`` file is not rendered directly, however, it is used as a component to the ``CHANGELOG.md.j2`` via an ``include`` statement in the changelog template. * To render data files into the ``src/`` folder, the path to which the template should be rendered has to be created within the ``ch-templates`` directory. * The ``ch-templates/static`` folder is created at the top-level of the project, and the file ``ch-templates/static/config.cfg`` is *copied, not rendered* to the new top-level ``static`` folder. You may wish to leverage this behavior to modularize your changelog template, to define macros in a separate file, or to reference static data which you would like to avoid duplicating between your template environment and the remainder of your project. .. _changelog-templates-template-rendering-template-context: Changelog Template Context ^^^^^^^^^^^^^^^^^^^^^^^^^^ During the rendering of a directory tree, Python Semantic Release provides information about the history of the project available within the templating environment in order for it to be used to generate the changelog and other desired documents. Important project information is provided to the templating environment through the global variable ``context`` or ``ctx`` for short. 
Within the template environment, the ``context`` object has the following attributes: * ``changelog_insertion_flag (str)``: the insertion flag used to determine where the new release information should be inserted into the changelog file. This value is passed directly from :ref:`changelog.insertion_flag `. *Introduced in v9.10.0.* **Example Usage:** .. code:: jinja {% set changelog_parts = prev_changelog_contents.split( ctx.changelog_insertion_flag, maxsplit=1 ) %} * ``changelog_mode (Literal["init", "update"])``: the mode of the changelog generation currently being used. This can be used to determine different rendering logic. This value is passed directly from the :ref:`changelog.mode ` configuration setting. *Introduced in v9.10.0.* **Example Usage:** .. code:: jinja {% if ctx.changelog_mode == "init" %}{% include ".changelog_init.md.j2" %}{# #}{% elif ctx.changelog_mode == "update" %}{% include ".changelog_update.md.j2" %}{# #}{% endif %} * ``history (ReleaseHistory)``: the :class:`ReleaseHistory ` instance for the project (See the :ref:`Release History ` section for more information). **Example Usage:** .. code:: jinja {% set unreleased_commits = ctx.history.unreleased | dictsort %}{% for release in context.history.released.values() %}{% include ".versioned_changes.md.j2" #}{% endfor %} * ``hvcs_type (str)``: the name of the VCS server type currently configured. This can be used to determine which filters are available or different rendering logic. *Introduced in v9.6.0.* **Example Usage:** .. code:: jinja {% if ctx.hvcs_type == "github" %}{{ "29" | pull_request_url }}{# #}{% elif ctx.hvcs_type == "gitlab" %}{{ "29" | merge_request_url }}{# #}{% endif %} * ``mask_initial_release (bool)``: a boolean value indicating whether the initial release should be masked with a generic message. This value is passed directly from the :ref:`changelog.default_templates.mask_initial_release ` configuration setting. *Introduced in v9.14.0.* **Example Usage:** .. 
code:: jinja #}{% if releases | length == 1 and ctx.mask_initial_release %}{# # On a first release, generate a generic message #}{% include ".components/first_release.md.j2" %}{% else %}{# # Not the first release #}{% include ".components/versioned_changes.md.j2" %}{% endif %} * ``repo_name (str)``: the name of the current repository parsed from the Git url. **Example Usage:** .. code:: jinja {{ ctx.repo_name }} .. code:: markdown example_repo * ``repo_owner (str)``: the owner of the current repository parsed from the Git url. **Example Usage:** .. code:: jinja {{ ctx.repo_owner }} .. code:: markdown example_org * ``prev_changelog_file (str)``: the path to the previous changelog file that should be updated with the new release information. This value is passed directly from :ref:`changelog.changelog_file `. *Introduced in v9.10.0.* **Example Usage:** .. code:: jinja {% set prev_changelog_contents = prev_changelog_file | read_file | safe %} .. _changelog-templates-template-rendering-template-context-release-history: Release History """"""""""""""" A :py:class:`ReleaseHistory ` object has two attributes: ``released`` and ``unreleased``. The ``unreleased`` attribute is of type ``Dict[str, List[ParseResult]]``. Each commit in the current branch's commit history since the last release on this branch is grouped by the ``type`` attribute of the :py:class:`ParsedCommit ` returned by the commit parser, or if the parser returned a :py:class:`ParseError ` then the result is grouped under the ``"unknown"`` key. For this reason, every element of ``ReleaseHistory.unreleased["unknown"]`` is a :py:class:`ParseError `, and every element of every other value in ``ReleaseHistory.unreleased`` is of type :py:class:`ParsedCommit `. Typically, commit types will be ``"feature"``, ``"fix"``, ``"breaking"``, though the specific types are determined by the parser. 
For example, the :py:class:`EmojiCommitParser ` uses a textual representation of the emoji corresponding to the most significant change introduced in a commit (e.g. ``":boom:"``) as the different commit types. As a template author, you are free to customize how these are presented in the rendered template. .. note:: If you are using a custom commit parser following the guide at :ref:`commit_parser-custom_parser`, your custom implementations of :py:class:`ParseResult `, :py:class:`ParseError ` and :py:class:`ParsedCommit ` will be used in place of the built-in types. The ``released`` attribute is of type ``Dict[Version, Release]``. The keys of this dictionary correspond to each version released within this branch's history, and are of type :py:class:`Version `. You can use the ``as_tag()`` method to render these as the Git tag that they correspond to inside your template. A :py:class:`Release ` object has an ``elements`` attribute, which has the same structure as the ``unreleased`` attribute of a :py:class:`ReleaseHistory `; that is, ``elements`` is of type ``Dict[str, List[ParseResult]]``, where every element of ``elements["unknown"]`` is a :py:class:`ParseError `, and elements of every other value correspond to the ``type`` attribute of the :py:class:`ParsedCommit ` returned by the commit parser. The commits represented within each ``ReleaseHistory.released[version].elements`` grouping are the commits which were made between version and the release corresponding to the previous version. That is, given two releases ``Version(1, 0, 0)`` and ``Version(1, 1, 0)``, ``ReleaseHistory.released[Version(1, 0, 0)].elements`` contains only commits made after the release of ``Version(1, 0, 0)`` up to and including the release of ``Version(1, 1, 0)``. To maintain a consistent order of subsections in the changelog headed by the commit type, it's recommended to use Jinja's `dictsort `_ filter. 
Each :py:class:`Release ` object also has the following attributes: * ``tagger: git.Actor``: The tagger who tagged the release. * ``committer: git.Actor``: The committer who made the release commit. * ``tagged_date: datetime``: The date and time at which the release was tagged. .. seealso:: * :ref:`commit_parser-builtin` * :ref:`Commit Parser Tokens ` * `git.Actor `_ * `datetime.strftime Format Codes `_ .. _changelog-templates-custom_templates-filters: Changelog Template Filters ^^^^^^^^^^^^^^^^^^^^^^^^^^ In addition to the context variables, PSR seeds the template environment with a set of custom functions (commonly called ``filters`` in `Jinja`_ terminology) for use within the template. Filter's first argument is always piped (``|``) to the function while any additional arguments are passed in parentheses like normal function calls. The filters provided vary based on the VCS configured and available features: * ``autofit_text_width (Callable[[textStr, maxWidthInt, indent_sizeInt], textStr])``: given a text string, fit the text to the maximum width provided. This filter is useful when you want to wrap text to a specific width. The filter will attempt to break the text at word boundaries and will indent the text by the amount specified in the ``indent_size`` parameter. *Introduced in v9.12.0.* **Example Usage:** .. code:: jinja {{ "This is a long string that needs to be wrapped to a specific width" | autofit_text_width(40, 4) }} **Markdown Output:** .. code:: markdown This is a long string that needs to be wrapped to a specific width * ``convert_md_to_rst (Callable[[MdStr], RstStr])``: given a markdown string, convert it to reStructuredText format. This filter is useful when building a reStructuredText changelog but your commit messages are in markdown format. It is utilized by the default RST changelog template. It is limited in its ability to convert all markdown to reStructuredText, but it handles most common cases (bold, italics, inline-raw, etc.) 
within commit messages. *Introduced in v9.11.0.* **Example Usage:** .. code:: jinja {{ "\n* %s (`%s`_)\n" | format( commit.message.rstrip() | convert_md_to_rst, commit.short_hash, ) }} * ``create_pypi_url(package_name: str, version: str = "")``: given a package name and an optional version, return a URL to the PyPI page for the package. If a version is provided, the URL will point to the specific version page. If no version is provided, the URL will point to the package page. *Introduced in v9.18.0.* **Example Usage:** .. code:: jinja {{ "example-package" | create_pypi_url }} {{ "example-package" | create_pypi_url("1.0.0") }} **Markdown Output:** .. code:: markdown https://pypi.org/project/example-package https://pypi.org/project/example-package/1.0.0 * ``create_release_url (Callable[[TagStr], UrlStr])``: given a tag, return a URL to the release page on the remote vcs. This filter is useful when you want to link to the release page on the remote vcs. *Introduced in v9.18.0.* **Example Usage:** .. code:: jinja {{ "v1.0.0" | create_release_url }} **Markdown Output:** .. code:: markdown https://example.com/example/repo/releases/tag/v1.0.0 * ``create_server_url (Callable[[PathStr, AuthStr | None, QueryStr | None, FragmentStr | None], UrlStr])``: when given a path, prepend the configured vcs server host and url scheme. Optionally you can provide, a auth string, a query string or a url fragment to be normalized into the resulting url. Parameter order is as described above respectively. *Introduced in v9.6.0.* **Example Usage:** .. code:: jinja {{ "example/repo.git" | create_server_url }} {{ "example/repo" | create_server_url(None, "results=1", "section-header") }} **Markdown Output:** .. 
code:: markdown https://example.com/example/repo.git https://example.com/example/repo?results=1#section-header * ``create_repo_url (Callable[[RepoPathStr, QueryStr | None, FragmentStr | None], UrlStr])``: when given a repository path, prepend the configured vcs server host, and repo namespace. Optionally you can provide, an additional query string and/or a url fragment to also put in the url. Parameter order is as described above respectively. This is similar to ``create_server_url`` but includes the repo namespace and owner automatically. *Introduced in v9.6.0.* **Example Usage:** .. code:: jinja {{ "releases/tags/v1.0.0" | create_repo_url }} {{ "issues" | create_repo_url("q=is%3Aissue+is%3Aclosed") }} **Markdown Output:** .. code:: markdown https://example.com/example/repo/releases/tags/v1.0.0 https://example.com/example/repo/issues?q=is%3Aissue+is%3Aclosed * ``commit_hash_url (Callable[[hashStr], UrlStr])``: given a commit hash, return a URL to the commit in the remote. *Introduced in v8.0.0.* **Example Usage:** .. code:: jinja {{ commit.hexsha | commit_hash_url }} **Markdown Output:** .. code:: markdown https://example.com/example/repo/commit/a1b2c3d435657f5d339ba10c7b1ed81b460af51d * ``compare_url (Callable[[StartRefStr, StopRefStr], UrlStr])``: given a starting git reference and a ending git reference create a comparison url between the two references that can be opened on the remote *Introduced in v9.6.0.* **Example Usage:** .. code:: jinja {{ "v1.0.0" | compare_url("v1.1.0") }} **Markdown Output:** .. code:: markdown https://example.com/example/repo/compare/v1.0.0...v1.1.0 * ``issue_url (Callable[[IssueNumStr | IssueNumInt], UrlStr])``: given an issue number, return a URL to the issue on the remote vcs. In v9.12.2, this filter was updated to handle a string that has leading prefix symbols (ex. ``#32``) and will strip the prefix before generating the URL. *Introduced in v9.6.0, Modified in v9.12.2.* **Example Usage:** .. 
code:: jinja {# Add Links to issues annotated in the commit message # NOTE: commit.linked_issues is only available in v9.15.0 or greater # #}{% for issue_ref in commit.linked_issues %}{{ "- [%s](%s)" | format(issue_ref, issue_ref | issue_url) }}{% endfor %} **Markdown Output:** .. code:: markdown - [#32](https://example.com/example/repo/issues/32) * ``merge_request_url (Callable[[MergeReqStr | MergeReqInt], UrlStr])``: given a merge request number, return a URL to the merge request in the remote. This is an alias to the ``pull_request_url`` but only available for the VCS that uses the merge request terminology. In v9.12.2, this filter was updated to handle a string that has leading prefix symbols (ex. ``#29``) and will strip the prefix before generating the URL. *Introduced in v9.6.0, Modified in v9.12.2.* **Example Usage:** .. code:: jinja {{ "[%s](%s)" | format( commit.linked_merge_request, commit.linked_merge_request | merge_request_url ) }} {# commit.linked_merge_request is only available in v9.13.0 or greater #} **Markdown Output:** .. code:: markdown [#29](https://example.com/example/repo/-/merge_requests/29) * ``pull_request_url (Callable[[PullReqStr | PullReqInt], UrlStr])``: given a pull request number, return a URL to the pull request in the remote. For remote vcs' that use merge request terminology, this filter is an alias to the ``merge_request_url`` filter function. In v9.12.2, this filter was updated to handle a string that has leading prefix symbols (ex. ``#29``) and will strip the prefix before generating the URL. *Introduced in v9.6.0, Modified in v9.12.2.* **Example Usage:** .. code:: jinja {# Create a link to the merge request associated with the commit # NOTE: commit.linked_merge_request is only available in v9.13.0 or greater #}{{ "[%s](%s)" | format( commit.linked_merge_request, commit.linked_merge_request | pull_request_url ) }} **Markdown Output:** .. 
code:: markdown [#29](https://example.com/example/repo/pull/29) * ``format_w_official_vcs_name (Callable[[str], str])``: given a format string, insert the official VCS type name into the string and return. This filter is useful when you want to display the proper name of the VCS type in a changelog or release notes. The filter supports three different replace formats: ``%s``, ``{}``, and ``{vcs_name}``. *Introduced in v9.18.0.* **Example Usage:** .. code:: jinja {{ "%s Releases" | format_w_official_vcs_name }} {{ "{} Releases" | format_w_official_vcs_name }} {{ "{vcs_name} Releases" | format_w_official_vcs_name }} **Markdown Output:** .. code:: markdown GitHub Releases GitHub Releases GitHub Releases * ``read_file (Callable[[str], str])``: given a file path, read the file and return the contents as a string. This function was added specifically to enable the changelog update feature where it would load the existing changelog file into the templating environment to be updated. *Introduced in v9.10.0.* **Example Usage:** .. code:: jinja {% set prev_changelog_contents = prev_changelog_file | read_file | safe %} * ``sort_numerically (Callable[[Iterable[str], bool], list[str]])``: given a sequence of strings with possibly some non-number characters as a prefix or suffix, sort the strings as if they were just numbers from lowest to highest. This filter is useful when you want to sort issue numbers or other strings that have a numeric component in them but cannot be cast to a number directly to sort them. If you want to sort the strings in reverse order, you can pass a boolean value of ``True`` as the second argument. *Introduced in v9.16.0.* **Example Usage:** .. code:: jinja {{ ["#222", "#1023", "#444"] | sort_numerically }} {{ ["#222", "#1023", "#444"] | sort_numerically(True) }} **Markdown Output:** .. 
code:: markdown ['#222', '#444', '#1023'] ['#1023', '#444', '#222'] Availability of the documented filters can be found in the table below: ========================== ========= ===== ====== ====== **filter - hvcs_type** bitbucket gitea github gitlab ========================== ========= ===== ====== ====== autofit_text_width ✅ ✅ ✅ ✅ convert_md_to_rst ✅ ✅ ✅ ✅ create_pypi_url ✅ ✅ ✅ ✅ create_server_url ✅ ✅ ✅ ✅ create_release_url ⌠✅ ✅ ✅ create_repo_url ✅ ✅ ✅ ✅ commit_hash_url ✅ ✅ ✅ ✅ compare_url ✅ ⌠✅ ✅ format_w_official_vcs_name ✅ ✅ ✅ ✅ issue_url ⌠✅ ✅ ✅ merge_request_url ⌠⌠⌠✅ pull_request_url ✅ ✅ ✅ ✅ read_file ✅ ✅ ✅ ✅ sort_numerically ✅ ✅ ✅ ✅ ========================== ========= ===== ====== ====== .. seealso:: * `Filters `_ .. _changelog-templates-template-rendering-example: Example ^^^^^^^ The following template is a simple example of how to render a changelog using the PSR template context to create a changelog in Markdown format. **Configuration:** ``pyproject.toml`` .. code:: toml [tool.semantic_release.changelog] template_dir = "templates" **Template:** ``templates/CHANGELOG.md.j2`` .. code:: jinja # CHANGELOG {% for version, release in ctx.history.released.items() %}{{ "## %s (%s)" | format(version.as_tag(), release.tagged_date.strftime("%Y-%m-%d")) }}{% for type_, commits in release["elements"] if type_ != "unknown" | dictsort %}{{ "### %s" | format(type_ | title) }}{% for commit in commits %}{{ "* %s ([`%s`](%s))" | format( commit.descriptions[0] | capitalize, commit.hexsha[:7], commit.hexsha | commit_hash_url, ) }}{% endfor %}{% endfor %}{% endfor %} **Result:** ``CHANGELOG.md`` .. 
code:: markdown # CHANGELOG ## v1.1.0 (2022-01-01) ### Feature * Added a new feature ([`a1b2c3d`](https://github.com/example/repo/commit/a1b2c3d)) ## v1.0.0 (2021-12-31) ### Fix * Resolved divide by zero error ([`e4f5g6h`](https://github.com/example/repo/commit/e4f5g6h)) It is important to note that the template utilizes the ``context`` variable to extract the project history as well as the ``commit_hash_url`` filter to generate a URL to the remote VCS for each commit. Both of these are injected into the template environment by PSR. .. _changelog-templates-custom_release_notes: Custom Release Notes -------------------- If you would like to customize the appearance of your release notes, you can add a hidden file named ``.release_notes.md.j2`` at the root of your :ref:`changelog.template_dir `. This file will automatically be detected and used to render the release notes during the :ref:`cmd-version` and :ref:`cmd-changelog` commands. A similar :ref:`template rendering ` mechanism is used to render the release notes as is used for the changelog. There are minor differences in the context available to the release notes template but the template directory structure and modularity is maintained. .. tip:: When initially starting out at customizing your own release notes template, you should reference the default template embedded within PSR. The release notes template can be found in the directory ``data/templates//md`` within the PSR package. .. _changelog-templates-custom_release_notes-context: Release Notes Context ^^^^^^^^^^^^^^^^^^^^^ All of the changelog's :ref:`template context ` is exposed to the `Jinja`_ template when rendering the release notes. 
Additionally, the following two globals are available to the template: * ``release`` (:py:class:`Release `): contains metadata about the content of the release, as parsed from commit logs *Introduced in v8.0.0.* * ``version`` (:py:class:`Version `): contains metadata about the software version to be released and its ``git`` tag *Introduced in v8.0.0.* .. _changelog-templates-release-notes-template-example: Example ^^^^^^^ Below is an example template that can be used to render release notes (it's similar to GitHub's `automatically generated release notes`_): .. _Automatically generated release notes: https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes **Configuration:** ``pyproject.toml`` .. code:: toml [tool.semantic_release.changelog] template_dir = "templates" **Template:** ``templates/.release_notes.md.j2`` .. code:: jinja ## What's Changed {% for type_, commits in release["elements"] | dictsort %}{%- if type_ != "unknown" %}{{ "### %s" | format(type_ | title) }}{% for commit in commits %}{{ "* %s by %s in [`%s`](%s)" | format( commit.descriptions[0] | capitalize, commit.commit.author.name, commit.hexsha[:7], commit.hexsha | commit_hash_url, ) }}{%- endfor %}{% endif %}{% endfor %} **Result:** ``https://github.com/example/repo/releases/tag/v1.1.0`` .. code:: markdown ## What's Changed ### Feature * Added a new feature by John Doe in [`a1b2c3d`](https://github.com/example/repo/commit/a1b2c3d) .. _changelog-templates-migrating-existing-changelog: Migrating an Existing Changelog ------------------------------- **v9.10.0 or greater** Migrating an existing changelog is simple with Python Semantic Release! To preserve your existing changelog, follow these steps: 1. **Set the changelog.mode to "update"** in your configuration file. This will ensure that only the new release information is added to your existing changelog file. 2. 
**Set the changelog.insertion_flag to a unique string.** You may use the default value or set it to a unique string that is not present in your existing changelog file. This flag is used to determine where the new release information should be inserted into your existing changelog. 3. **Add the insertion flag to your changelog file.** This must match the value you set in step 2. The insertion flag should be placed in the location above where you would like the new release information to be inserted. .. note:: If you are trying to convert an existing changelog to a new format, you will need to do most of the conversion manually (or rebuild via init and modify) and make sure to include your insertion flag into the format of the new changelog. **Prior to v9.10.0** If you have an existing changelog that you would like to preserve, you will need to add the contents of the changelog file to your changelog template - either directly or via Jinja's `include `_ tag. If you would like only the history from your next release onwards to be rendered into the changelog in addition to the existing changelog, you can add an `if statement `_ based upon the versions in the keys of ``context.released``. .. _changelog-templates-upgrading-templates: Upgrading Templates ------------------- As PSR evolves, new features and improvements are added to the templating engine. If you have created your own custom templates, you may need to update them to take advantage of some new features. Below are some instructions on how to upgrade your templates to gain the benefits of the new features. .. _changelog-templates-upgrading-updating_changelog: Incrementally Updating Changelog Template ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. note:: This section is only relevant if you are upgrading from a version of PSR greater than v8.0.0 and prior to ``v9.10.0`` and have created your own custom templates. 
If you have previously created your own custom templates and would like to gain the benefits of the new updating changelog feature, you will need to make a few changes to your existing templates. The following steps are a few suggestions to help upgrade your templates but primarily you should review the embedded default templates in the PSR package for a full example. You can find the default templates at `data/templates/`__ directory. __ https://github.com/python-semantic-release/python-semantic-release/tree/master/src/semantic_release/data/templates 1. **Add a conditional to check the changelog_mode.** This will allow you to determine if you should render the entire changelog or just the new release information. See ``data/templates/*/md/CHANGELOG.md.j2`` for reference. 2. **Use the new read_file filter** to read in the existing changelog file ``ctx.prev_changelog_file``. This will allow you to include the existing changelog content in your new changelog file. See ``data/templates/*/md/.components/changelog_update.md.j2`` for reference. 3. **Split the changelog content based on the insertion flag.** This will allow you to insert the new release information after the insertion flag (``ctx.changelog_insertion_flag``). See ``data/templates/*/md/.components/changelog_update.md.j2`` for reference. 4. **Print the leading content before the insertion flag.** This ensures you maintain any content that should be included before the new release information. See ``data/templates/*/md/.components/changelog_update.md.j2`` for reference. 5. **Print your insertion flag.** This is imperative to ensure that the resulting changelog can be updated in the future. See ``data/templates/*/md/.components/changelog_update.md.j2`` for reference. 6. **Print the new release information.** Be sure to consider both unreleased and released commits during this step because of the :ref:`cmd-changelog` command that can be run at any time. 
See ``data/templates/*/md/.components/changelog_update.md.j2`` for reference. 7. **Print the trailing content after the insertion flag.** This ensures you maintain any content that should be included after the new release information. See ``data/templates/*/md/.components/changelog_update.md.j2`` for reference. .. tip:: Modularity of your templates is key to handling both modes of changelog generation. Reference the default templates for examples on how we handle both modes and defensively handle numerous breaking scenarios. .. tip:: If you are having trouble upgrading your templates, please post a question on the `PSR GitHub`__ __ https://github.com/python-semantic-release/python-semantic-release/issues python-semantic-release-10.4.1/docs/concepts/commit_parsing.rst000066400000000000000000001224101506116242600246530ustar00rootroot00000000000000.. _commit_parsing: Commit Parsing ============== One of the core components of Python Semantic Release (PSR) is the commit parser. The commit parser is responsible for parsing a Project's Git Repository commit history to extract insights about project changes and make decisions based on this insight. The primary decision that PSR makes based on the commit history is whether or not to release a new version of the project, and if so, what version number to release. This decision is made based on the commit message descriptions of the change impact introduced by the commit. The change impact describes the impact to the end consumers of the project. Depending on the type of change, the version number will be incremented according to the `Semantic Versioning`_ specification (semver). It is the commit parser's job to extract the change impact from the commit message to determine the severity of the changes and then subsequently determine the semver level that the version should be bumped to for the next release. 
The commit parser is also responsible for interpreting other aspects of the commit message which can be used to generate a helpful and detailed changelog. This includes extracting the type of change, the scope of the change, any breaking change descriptions, any linked pull/merge request numbers, and any linked issue numbers. PSR provides several :ref:`built-in commit parsers ` to handle a variety of different commit message styles. If the built-in parsers do not meet your needs, you can write your own :ref:`custom parser ` to handle your specific commit message style. .. warning:: PSR's built-in commit parsers are designed to be flexible enough to provide a convenient way to generate the most effective changelogs we can, which means some features are added beyond the scope of the original commit message style guidelines. Other tools may not follow the same conventions as PSR's guideline extensions, so if you plan to use any similar programs in tandem with PSR, you should be aware of the differences in feature support and fall back to the official format guidelines if necessary. .. _Semantic Versioning: https://semver.org/ ---- .. _commit_parser-builtin: Built-in Commit Parsers ----------------------- The following parsers are built in to Python Semantic Release: - :ref:`ConventionalCommitParser ` - :ref:`ConventionalCommitMonorepoParser ` *(available in v10.4.0+)* - :ref:`AngularCommitParser ` *(deprecated in v9.19.0)* - :ref:`EmojiCommitParser ` - :ref:`ScipyCommitParser ` - :ref:`TagCommitParser ` *(deprecated in v9.12.0)* ---- .. _commit_parser-builtin-conventional: Conventional Commits Parser """"""""""""""""""""""""""" *Introduced in v9.19.0* A parser that is designed to parse commits formatted according to the `Conventional Commits Specification`_. 
The parser is implemented with the following logic in relation to PSR's core features: - **Version Bump Determination**: This parser extracts the commit type from the subject line of the commit (the first line of a commit message). This type is matched against the configuration mapping to determine the level bump for the specific commit. If the commit type is not found in the configuration mapping, the commit is considered a non-parsable commit and will return it as a ParseError object and ultimately a commit of type ``"unknown"``. The configuration mapping contains lists of commit types that correspond to the level bump for each commit type. Some commit types are still valid do not trigger a level bump, such as ``"chore"`` or ``"docs"``. You can also configure the default level bump :ref:`commit_parser_options.default_level_bump ` if desired. To trigger a major release, the commit message body must contain a paragraph that begins with ``BREAKING CHANGE:``. This will override the level bump determined by the commit type. - **Changelog Generation**: PSR will group commits in the changelog by the commit type used in the commit message. The commit type shorthand is converted to a more human-friendly section heading and then used as the version section title of the changelog and release notes. Under the section title, the parsed commit descriptions are listed out in full. If the commit includes an optional scope, then the scope is prefixed on to the first line of the commit description. If a commit has any breaking change prefixed paragraphs in the commit message body, those paragraphs are separated out into a "Breaking Changes" section in the changelog (Breaking Changes section is available from the default changelog in v9.15.0). Each breaking change paragraph is listed in a bulleted list format across the entire version. A single commit is allowed to have more than one breaking change prefixed paragraph (as opposed to the `Conventional Commits Specification`_). 
Commits with an optional scope and a breaking change will have the scope prefixed on to the breaking change paragraph. Parsing errors will return a ParseError object and ultimately a commit of type ``"unknown"``. Unknown commits are consolidated into an "Unknown" section in the changelog by the default template. To remove unwanted commits from the changelog that normally are placed in the "unknown" section, consider the use of the configuration option :ref:`changelog.exclude_commit_patterns ` to ignore those commit styles. - **Pull/Merge Request Identifier Detection**: This parser implements PSR's :ref:`commit_parser-builtin-linked_merge_request_detection` to identify and extract pull/merge request numbers. The parser will return a string value if a pull/merge request number is found in the commit message. If no pull/merge request number is found, the parser will return an empty string. - **Linked Issue Identifier Detection**: This parser implements PSR's :ref:`commit_parser-builtin-issue_number_detection` to identify and extract issue numbers. The parser will return a tuple of issue numbers as strings if any are found in the commit message. If no issue numbers are found, the parser will return an empty tuple. - **Squash Commit Evaluation**: This parser implements PSR's :ref:`commit_parser-builtin-squash_commit_evaluation` to identify and extract each commit message as a separate commit message within a single squashed commit. You can toggle this feature on/off via the :ref:`config-commit_parser_options` setting. - **Release Notice Footer Detection**: This parser implements PSR's :ref:`commit_parser-builtin-release_notice_footer_detection`, which is a custom extension to traditional `Conventional Commits Specification`_ to use the ``NOTICE`` keyword as a git footer to document additional release information that is not considered a breaking change. **Limitations**: - Commits with the ``revert`` type are not currently supported. 
Track the implementation of this feature in the issue `#402`_. If no commit parser options are provided via the configuration, the parser will use PSR's built-in :py:class:`defaults `. .. _#402: https://github.com/python-semantic-release/python-semantic-release/issues/402 .. _Conventional Commits Specification: https://www.conventionalcommits.org/en/v1.0.0 ---- .. _commit_parser-builtin-conventional-monorepo: Conventional Commits Monorepo Parser """""""""""""""""""""""""""""""""""" *Introduced in v10.4.0* .. important:: In order for this parser to be effective, please review the section titled :ref:`monorepos` for details on file structure, configurations, and release actions. This parser is an extension of the :ref:`commit_parser-builtin-conventional`, designed specifically for monorepo environments. A monorepo environment is defined as a single source control repository that contains multiple packages, each of which can be released independently and may have different version numbers. This parser introduces two new configuration options that determine which packages are affected by a commit. These options control whether a commit is considered for version determination, changelog generation, and other release actions for the relevant packages. The 2 new configuration options are :py:class:`path_filters ` and :py:class:`scope_prefix `. **Features**: - **Package Specific Commit Filtering**: For monorepo support, this parser uses 2 filtering rules to determine if a commit should be considered for a specific package. The first rule is based on file paths that are changed in the commit and the second rule is based on the optional scope prefix defined in the commit message. If either rule matches, then the commit is considered relevant to that package and will be used in version determination, changelog generation, etc, for that package. If neither rule matches, then the commit is ignored for that package. 
File path filtering rules are applied first and are the primary way to determine package relevance. The :py:class:`path_filters ` option allows for specifying a list of file path patterns and will also support negated patterns to ignore specific paths that otherwise would be selected from the file glob pattern. Negated patterns are defined by prefixing the pattern with an exclamation point (``!``). File path filtering is quite effective by itself but to handle the edge cases, the parser has the :py:class:`scope_prefix ` configuration option to allow the developer to specifically define when the commit is relevant to the package. In monorepo setups, there are often shared files between packages (generally at the root project level) that are modified occasionally but not always relevant to the package being released. Since you do not want to define this path in the package configuration as it may not be relevant to the release, then this parser will look for a match with the scope prefix. The scope prefix is a regular expression that is used to match the text inside the scope field of a Conventional Commit. The scope prefix is optional and is used only if file path filtering does not match. Commits that have matching files in the commit will be considered relevant to the package **regardless** if a scope prefix exists or if it matches. - **Version Bump Determination**: Once package-specific commit filtering is applied, the relevant commits are passed to the Conventional Commits Parser for evaluation and then used for version bump determination. See :ref:`commit_parser-builtin-conventional` for details. - **Changelog Generation**: Once package-specific commit filtering is applied, the relevant commits are passed to the Conventional Commits Parser for evaluation and then used for changelog generation. See :ref:`commit_parser-builtin-conventional` for details. 
- **Pull/Merge Request Identifier Detection**: Once package-specific commit filtering is applied, the relevant commits are passed to the Conventional Commits Parser for pull/merge request identifier detection. See :ref:`commit_parser-builtin-linked_merge_request_detection` for details. - **Linked Issue Identifier Detection**: Once package-specific commit filtering is applied, the relevant commits are passed to the Conventional Commits Parser for linked issue identifier detection. See :ref:`commit_parser-builtin-issue_number_detection` for details. - **Squash Commit Evaluation**: Squashed commits are separated out into individual commits with the same set of changed files **BEFORE** the package-specific commit filtering is applied. Each pseudo-commit is then subjected to the same filtering rules as regular commits. See :ref:`commit_parser-builtin-squash_commit_evaluation` for details. - **Release Notice Footer Detection**: Once package-specific commit filtering is applied, the relevant commits are passed to the Conventional Commits Parser for release notice footer detection. See :ref:`commit_parser-builtin-release_notice_footer_detection` for details. **Limitations**: - ``revert`` commit type is NOT supported, see :ref:`commit_parser-builtin-conventional`'s limitations for details. If no commit parser options are provided via the configuration, the parser will use PSR's built-in :py:class:`defaults `. ---- .. _commit_parser-builtin-angular: Angular Commit Parser """"""""""""""""""""" .. warning:: This parser was deprecated in ``v9.19.0``. It will be removed in a future release. This parser is being replaced by the :ref:`commit_parser-builtin-conventional`. A parser that is designed to parse commits formatted according to the `Angular Commit Style Guidelines`_. 
The parser is implemented with the following logic in relation to PSR's core features: - **Version Bump Determination**: This parser extracts the commit type from the subject line of the commit (the first line of a commit message). This type is matched against the configuration mapping to determine the level bump for the specific commit. If the commit type is not found in the configuration mapping, the commit is considered a non-parsable commit and will be returned as a ParseError object and ultimately a commit of type ``"unknown"``. The configuration mapping contains lists of commit types that correspond to the level bump for each commit type. Some commit types are still valid but do not trigger a level bump, such as ``"chore"`` or ``"docs"``. You can also configure the default level bump :ref:`commit_parser_options.default_level_bump ` if desired. To trigger a major release, the commit message body must contain a paragraph that begins with ``BREAKING CHANGE:``. This will override the level bump determined by the commit type. - **Changelog Generation**: PSR will group commits in the changelog by the commit type used in the commit message. The commit type shorthand is converted to a more human-friendly section heading and then used as the version section title of the changelog and release notes. Under the section title, the parsed commit descriptions are listed out in full. If the commit includes an optional scope, then the scope is prefixed on to the first line of the commit description. If a commit has any breaking change prefixed paragraphs in the commit message body, those paragraphs are separated out into a "Breaking Changes" section in the changelog (Breaking Changes section is available from the default changelog in v9.15.0). Each breaking change paragraph is listed in a bulleted list format across the entire version. A single commit is allowed to have more than one breaking change prefixed paragraph (as opposed to the `Angular Commit Style Guidelines`_). 
Commits with an optional scope and a breaking change will have the scope prefixed on to the breaking change paragraph. Parsing errors will return a ParseError object and ultimately a commit of type ``"unknown"``. Unknown commits are consolidated into an "Unknown" section in the changelog by the default template. To remove unwanted commits from the changelog that normally are placed in the "unknown" section, consider the use of the configuration option :ref:`changelog.exclude_commit_patterns ` to ignore those commit styles. - **Pull/Merge Request Identifier Detection**: This parser implements PSR's :ref:`commit_parser-builtin-linked_merge_request_detection` to identify and extract pull/merge request numbers. The parser will return a string value if a pull/merge request number is found in the commit message. If no pull/merge request number is found, the parser will return an empty string. *Feature available in v9.13.0+.* - **Linked Issue Identifier Detection**: This parser implements PSR's :ref:`commit_parser-builtin-issue_number_detection` to identify and extract issue numbers. The parser will return a tuple of issue numbers as strings if any are found in the commit message. If no issue numbers are found, the parser will return an empty tuple. *Feature available in v9.15.0+.* - **Squash Commit Evaluation**: This parser implements PSR's :ref:`commit_parser-builtin-squash_commit_evaluation` to identify and extract each commit message as a separate commit message within a single squashed commit. You can toggle this feature on/off via the :ref:`config-commit_parser_options` setting. *Feature available in v9.17.0+.* - **Release Notice Footer Detection**: This parser implements PSR's :ref:`commit_parser-builtin-release_notice_footer_detection`, which is a custom extension to traditional `Angular Commit Style Guidelines`_ to use the ``NOTICE`` keyword as a git footer to document additional release information that is not considered a breaking change. 
*Feature available in v9.18.0+.* **Limitations**: - Commits with the ``revert`` type are not currently supported. Track the implementation of this feature in the issue `#402`_. If no commit parser options are provided via the configuration, the parser will use PSR's built-in :py:class:`defaults `. .. _#402: https://github.com/python-semantic-release/python-semantic-release/issues/402 .. _Angular Commit Style Guidelines: https://github.com/angular/angular.js/blob/master/DEVELOPERS.md#commits ---- .. _commit_parser-builtin-emoji: Emoji Commit Parser """"""""""""""""""" A parser that is designed to parse commits formatted according to the `Gitmoji Specification`_ with a few additional features that the specification does not cover but provide similar functionality expected from a Semantic Release tool. As the `Gitmoji Specification`_ describes, the emojis can be specified in either the unicode format or the shortcode text format. See the `Gitmoji Specification`_ for the pros and cons for which format to use, but regardless, the configuration options must match the format used in the commit messages. The parser is implemented with the following logic in relation to PSR's core features: - **Version Bump Determination**: This parser only looks for emojis in the subject line of the commit (the first line of a commit message). If more than one emoji is found, the emoji configured with the highest priority is selected for the change impact for the specific commit. The emoji with the highest priority is the one configured in the ``major`` configuration option, followed by the ``minor``, and ``patch`` in descending priority order. If no emoji is found in the subject line, the commit is classified as other and will default to the level bump defined by the configuration option :ref:`commit_parser_options.default_level_bump `. - **Changelog Generation**: PSR will group commits in the changelog by the emoji type used in the commit message. 
The emoji is used as the version section title and the commit descriptions are listed under that section. No emojis are removed from the commit message so each will be listed in the changelog and release notes. When more than one emoji is found in the subject line of a commit, the emoji with the highest priority is the one that will influence the grouping of the commit in the changelog. Commits containing no emojis or non-configured emojis are consolidated into an "Other" section. To remove unwanted commits from the changelog that would normally be added into the "other" section, consider the use of the configuration option :ref:`changelog.exclude_commit_patterns ` to ignore those commit styles. - **Pull/Merge Request Identifier Detection**: This parser implements PSR's :ref:`commit_parser-builtin-linked_merge_request_detection` to identify and extract pull/merge request numbers. The parser will return a string value if a pull/merge request number is found in the commit message. If no pull/merge request number is found, the parser will return an empty string. *Feature available in v9.13.0+.* - **Linked Issue Identifier Detection**: [Disabled by default] This parser implements PSR's :ref:`commit_parser-builtin-issue_number_detection` to identify and extract issue numbers. The parser will return a tuple of issue numbers as strings if any are found in the commit message. If no issue numbers are found, the parser will return an empty tuple. This feature is disabled by default since it is not a part of the `Gitmoji Specification`_ but can be enabled by setting the configuration option ``commit_parser_options.parse_linked_issues`` to ``true``. *Feature available in v9.15.0+.* - **Squash Commit Evaluation**: This parser implements PSR's :ref:`commit_parser-builtin-squash_commit_evaluation` to identify and extract each commit message as a separate commit message within a single squashed commit. 
You can toggle this feature on/off via the :ref:`config-commit_parser_options` setting. *Feature available in v9.17.0+.* - **Release Notice Footer Detection**: This parser implements PSR's :ref:`commit_parser-builtin-release_notice_footer_detection`, which is a custom extension that uses the ``NOTICE`` keyword as a git footer to document additional release information that is not considered a breaking change. *Feature available in v9.18.0+.* If no commit parser options are provided via the configuration, the parser will use PSR's built-in :py:class:`defaults `. .. _Gitmoji Specification: https://gitmoji.dev/specification ---- .. _commit_parser-builtin-scipy: Scipy Commit Parser """"""""""""""""""" A parser that is designed to parse commits formatted according to the `Scipy Commit Style Guidelines`_. This is essentially a variation of the `Angular Commit Style Guidelines`_ with all different commit types. Because of this small variance, this parser only extends our :ref:`commit_parser-builtin-angular` parser with pre-defined scipy commit types in the default Scipy Parser Options and all other features are inherited. **Limitations**: - Commits with the ``REV`` type are not currently supported. Track the implementation of this feature in the issue `#402`_. If no commit parser options are provided via the configuration, the parser will use PSR's built-in :py:class:`defaults `. .. _Scipy Commit Style Guidelines: https://scipy.github.io/devdocs/dev/contributor/development_workflow.html#writing-the-commit-message ---- .. _commit_parser-builtin-tag: Tag Commit Parser """"""""""""""""" .. warning:: This parser was deprecated in ``v9.12.0``. It will be removed in a future release. The original parser from v1.0.0 of Python Semantic Release. Similar to the emoji parser above, but with less features. If no commit parser options are provided via the configuration, the parser will use PSR's built-in :py:class:`defaults `. ---- .. 
_commit_parser-builtin-linked_merge_request_detection: Common Linked Merge Request Detection """"""""""""""""""""""""""""""""""""" *Introduced in v9.13.0* All of the PSR built-in parsers implement common pull/merge request identifier detection logic to extract pull/merge request numbers from the commit message regardless of the VCS platform. The parsers evaluate the subject line for a parenthesis-enclosed number at the end of the line. PSR's parsers will return a string value if a pull/merge request number is found in the commit message. If no pull/merge request number is found, the parsers will return an empty string. **Examples**: *All of the following will extract an MR number of "x123", where 'x' is the character prefix* 1. BitBucket: ``Merged in feat/my-awesome-feature (pull request #123)`` 2. GitHub: ``feat: add new feature (#123)`` 3. GitLab: ``fix: resolve an issue (!123)`` ---- .. _commit_parser-builtin-issue_number_detection: Common Issue Identifier Detection """"""""""""""""""""""""""""""""" *Introduced in v9.15.0* All of the PSR built-in parsers implement common issue identifier detection logic, which is similar to many VCS platforms such as GitHub, GitLab, and BitBucket. The parsers will look for common issue closure text prefixes in the `Git Trailer format`_ in the commit message to identify and extract issue numbers. The detection logic is not strict to any specific issue tracker as we try to provide a flexible approach to identifying issue numbers but in order to be flexible, it is **required** to use the `Git Trailer format`_ with a colon (``:``) as the token separator. PSR attempts to support all variants of issue closure text prefixes, but not all will work for your VCS. 
PSR supports the following case-insensitive prefixes and their conjugations (plural, present, & past tense): - close (closes, closing, closed) - fix (fixes, fixing, fixed) - resolve (resolves, resolving, resolved) - implement (implements, implementing, implemented) PSR also allows for a more flexible approach to identifying more than one issue number without the need of extra git trailers (although PSR does support multiple git trailers). PSR supports various list formats which can be used to identify more than one issue in a list. This format will not necessarily work on your VCS. PSR currently supports the following list formats: - comma-separated (ex. ``Closes: #123, #456, #789``) - space-separated (ex. ``resolve: #123 #456 #789``) - semicolon-separated (ex. ``Fixes: #123; #456; #789``) - slash-separated (ex. ``close: #123/#456/#789``) - ampersand-separated (ex. ``Implement: #123 & #789``) - and-separated (ex. ``Resolve: #123 and #456 and #789``) - mixed (ex. ``Closed: #123, #456, and #789`` or ``Fixes: #123, #456 & #789``) All the examples above use the most common issue number prefix (``#``) but PSR is flexible to support other prefixes used by VCS platforms or issue trackers such as JIRA (ex. ``ABC-###``). The parsers will return a tuple of issue numbers as strings if any are found in the commit message. Strings are returned to ensure that any issue number prefix characters are preserved (ex. ``#123`` or ``ABC-123``). If no issue numbers are found, the parsers will return an empty tuple. **References**: - `BitBucket: Resolving Issues Automatically `_ - `GitHub: Linking Issue to PR `_ - `GitLab: Default Closing Patterns `_ .. _Git Trailer format: https://git-scm.com/docs/git-interpret-trailers ---- .. 
_commit_parser-builtin-release_notice_footer_detection: Common Release Notice Footer Detection """""""""""""""""""""""""""""""""""""" *Introduced in v9.18.0* All of the PSR built-in parsers implement common release notice footer detection logic to identify and extract a ``NOTICE`` git trailer that documents any additional release information the developer wants to provide to the software consumer. The idea extends from the concept of the ``BREAKING CHANGE:`` git trailer to document any breaking change descriptions but the ``NOTICE`` trailer is intended to document any information that is below the threshold of a breaking change while still important for the software consumer to be aware of. Common uses would be to provide deprecation warnings or more detailed change usage information for that release. Parsers will collapse single newlines after the ``NOTICE`` trailer into a single line paragraph. Commits may have more than one ``NOTICE`` trailer in a single commit message. Each :py:class:`ParsedCommit ` will have a ``release_notices`` attribute that is a tuple of string paragraphs to identify each release notice. In the default changelog and release notes template, these release notices will be formatted into their own section called **Additional Release Information**. Each will include any commit scope defined and each release notice in alphabetical order. ---- .. _commit_parser-builtin-squash_commit_evaluation: Common Squash Commit Evaluation """"""""""""""""""""""""""""""" *Introduced in v9.17.0* All of the PSR built-in parsers implement common squash commit evaluation logic to identify and extract individual commit messages from a single squashed commit. The parsers will look for common squash commit delimiters and multiple matches of the commit message format to identify each individual commit message that was squashed. The parsers will return a list containing each commit message as a separate commit object. 
Squashed commits will be evaluated individually for both the level bump and changelog generation. If no squash commits are found, a list with the single commit object will be returned. Currently, PSR has been tested against GitHub, BitBucket, and official ``git`` squash merge commit messages. GitLab does not have a default template for squash commit messages but can be customized per project or server. If you are using GitLab, you will need to ensure that the squash commit message format is similar to the example below. **Example**: *The following example will extract three separate commit messages from a single GitHub formatted squash commit message of conventional commit style:* .. code-block:: text feat(config): add new config option (#123) * refactor(config): change the implementation of config loading * docs(configuration): defined new config option for the project When parsed with the default conventional-commit parser with squash commits toggled on, the version bump will be determined by the highest level bump of the three commits (in this case, a minor bump because of the feature commit) and the release notes would look similar to the following: .. code-block:: markdown ## Features - **config**: add new config option (#123) ## Documentation - **configuration**: defined new config option for the project (#123) ## Refactoring - **config**: change the implementation of config loading (#123) Merge request numbers and commit hash values will be the same across all extracted commits. Additionally, any :ref:`config-changelog-exclude_commit_patterns` will be applied individually to each extracted commit so if you have an exclusion match for ignoring ``refactor`` commits, the second commit in the example above would be excluded from the changelog. .. 
important:: When squash commit evaluation is enabled, if you squashed a higher level bump commit into the body of a lower level bump commit, the higher level bump commit will be evaluated as the level bump for the entire squashed commit. This includes breaking change descriptions. ---- .. _commit_parser-builtin-customization: Customization """"""""""""" Each of the built-in parsers can be customized by providing overrides in the :ref:`config-commit_parser_options` setting of the configuration file. This can be used to toggle parsing features on and off or to add, modify, or remove the commit types that are used to determine the level bump for a commit. Review the API documentation for the specific parser's options class to see what changes to the default behavior can be made. ---- .. _commit_parser-custom_parser: Custom Parsers -------------- Custom parsers can be written to handle commit message styles that are not covered by the built-in parsers or by option customization of the built-in parsers. Python Semantic Release provides several building blocks to help you write your parser. To maintain compatibility with how Python Semantic Release will invoke your parser, you should use the appropriate object as described below, or create your own object as a subclass of the original which maintains the same interface. Type parameters are defined where appropriate to assist with static type-checking. The :ref:`commit_parser ` option, if set to a string which does not match one of Python Semantic Release's built-in commit parsers, will be used to attempt to dynamically import a custom commit parser class. In order to use your custom parser, you must provide how to import the module and class via the configuration option. There are two ways to provide the import string: 1. **File Path & Class**: The format is ``"path/to/module_file.py:ClassName"``. This is the easiest way to provide a custom parser. 
This method allows you to store your custom parser directly in the repository with no additional installation steps. PSR will locate the file, load the module, and instantiate the class. Relative paths are recommended and it should be provided relative to the current working directory. This import variant is available in v9.16.0 and later. 2. **Module Path & Class**: The format is ``"package.module_name:ClassName"``. This method allows you to store your custom parser in a package that is installed in the same environment as PSR. This method is useful if you want to share your custom parser across multiple repositories. To share it across multiple repositories generally you will need to publish the parser as its own separate package and then ``pip install`` it into the current virtual environment. You can also keep it in the same repository as your project as long as it is in the current directory of the virtual environment and is locatable by the Python import system. You may need to set the ``PYTHONPATH`` environment variable if you have a more complex directory structure. This import variant is available in v8.0.0 and later. To test that your custom parser is importable, you can run the following command in the directory where PSR will be executed: .. code-block:: bash python -c "from package.module_name import ClassName" .. note:: Remember this is basic python import rules so the package name is optional and generally packages are defined by a directory with ``__init__.py`` files. .. _commit_parser-tokens: Tokens """""" The tokens built into Python Semantic Release's commit parsing mechanism are inspired by both the error-handling mechanism in `Rust's error handling`_ and its implementation in `black`_. 
It is documented that `catching exceptions in Python is slower`_ than the equivalent guard implemented using ``if/else`` checking when exceptions are actually caught, so although ``try/except`` blocks are cheap if no exception is raised, commit parsers should always return an object such as :py:class:`ParseError ` instead of raising an error immediately. This is to avoid a potentially large number of parsing errors being caught as the commit history of a repository is being parsed. Python Semantic Release does not raise an exception if a commit cannot be parsed. Python Semantic Release uses :py:class:`ParsedCommit ` as the return type of a successful parse operation, and :py:class:`ParseError ` as the return type from an unsuccessful parse of a commit. You should review the API documentation linked to understand the fields available on each of these objects. It is important to note, the :py:class:`ParseError ` implements an additional method, ``raise_error``. This method raises a :py:class:`CommitParseError ` with the message contained in the ``error`` field, as a convenience. In Python Semantic Release, the type ``semantic_release.commit_parser.token.ParseResult`` is defined as ``ParseResultType[ParsedCommit, ParseError]``, as a convenient shorthand. :py:class:`ParseResultType ` is a generic type, which is the ``Union`` of its two type parameters. One of the types in this union should be the type returned on a successful parse of the ``commit``, while the other should be the type returned on an unsuccessful parse of the ``commit``. 
A custom parser result type, therefore, could be implemented as follows: * ``MyParsedCommit`` subclasses :py:class:`ParsedCommit ` * ``MyParseError`` subclasses :py:class:`ParseError ` * ``MyParseResult = ParseResultType[MyParsedCommit, MyParseError]`` Internally, Python Semantic Release uses ``isinstance()`` to determine if the result of parsing a commit was a success or not, so you should check that your custom result and error types return ``True`` from ``isinstance(, ParsedCommit)`` and ``isinstance(, ParseError)`` respectively. While it's not advisable to remove any of the fields that are available in the built-in token types, currently only the ``bump`` field of the successful result type is used to determine how the version should be incremented as part of this release. However, it's perfectly possible to add additional fields to your tokens which can be populated by your parser; these fields will then be available on each commit in your :ref:`changelog template `, so you can make additional information available. .. _Rust's error handling: https://doc.rust-lang.org/book/ch09-02-recoverable-errors-with-result.html .. _black: https://github.com/psf/black/blob/main/src/black/rusty.py .. _catching exceptions in Python is slower: https://docs.python.org/3/faq/design.html#how-fast-are-exceptions .. _namedtuple: https://docs.python.org/3/library/typing.html#typing.NamedTuple .. _commit_parser-parser-options: Parser Options """""""""""""" When writing your own parser, you should accompany the parser with an "options" class which accepts the appropriate keyword arguments. This class' ``__init__`` method should store the values that are needed for parsing appropriately. Python Semantic Release will pass any configuration options from the configuration file's :ref:`commit_parser_options `, into your custom parser options class. To ensure that the configuration options are passed correctly, the options class should inherit from the :py:class:`ParserOptions ` class. 
The "options" class is used to validate the options which are configured in the repository, and to provide default values for these options where appropriate. .. _commit_parsing-commit-parsers: Commit Parsers """""""""""""" The commit parsers that are built into Python Semantic Release implement an instance method called ``parse``, which takes a single parameter ``commit`` of type `git.objects.commit.Commit `_, and returns the type ``ParseResultType``. To be compatible with Python Semantic Release, a commit parser must subclass :py:class:`CommitParser `. A subclass must implement the following: * A class-level attribute ``parser_options``, which must be set to :py:class:`ParserOptions ` or a subclass of this. * An ``__init__`` method which takes a single parameter, ``options``, that should be of the same type as the class' ``parser_options`` attribute. * A method, ``parse``, which takes a single parameter ``commit`` that is of type `git.objects.commit.Commit `_, and returns :py:class:`ParseResult `, or a subclass of this. By default, the constructor for :py:class:`CommitParser ` will set the ``options`` parameter on the ``options`` attribute of the parser, so there is no need to override this in order to access ``self.options`` during the ``parse`` method. However, if you have any parsing logic that needs to be done only once, it may be a good idea to perform this logic during parser instantiation rather than inside the ``parse`` method. The parse method will be called once per commit in the repository's history during parsing, so the effect of slow parsing logic within the ``parse`` method will be magnified significantly for projects with sizeable Git histories. Commit Parsers have two type parameters, "TokenType" and "OptionsType". The first is the type which is returned by the ``parse`` method, and the second is the type of the "options" class for this parser. Therefore, a custom commit parser could be implemented via: .. 
code-block:: python class MyParserOptions(semantic_release.ParserOptions): def __init__(self, message_prefix: str) -> None: self.prefix = message_prefix * 2 class MyCommitParser( semantic_release.CommitParser[semantic_release.ParseResult, MyParserOptions] ): def parse(self, commit: git.objects.commit.Commit) -> semantic_release.ParseResult: ... .. _gitpython-commit-object: https://gitpython.readthedocs.io/en/stable/reference.html#module-git.objects.commit python-semantic-release-10.4.1/docs/concepts/getting_started.rst000066400000000000000000000373761506116242600250470ustar00rootroot00000000000000.. _getting-started-guide: Getting Started =============== If you haven't done so already, install Python Semantic Release locally following the :ref:`installation instructions `. If you are using a CI/CD service, you may not have to add Python Semantic Release to your project's dependencies permanently, but for the duration of this guide for the initial setup, you will need to have it installed locally. Configuring PSR --------------- Python Semantic Release ships with a reasonable default configuration but some aspects **MUST** be customized to your project. To view the default configuration, run the following command: .. code-block:: bash semantic-release generate-config The output of the above command is the default configuration in TOML format without any modifications. If this is fine for your project, then you do not need to configure anything else. PSR accepts overrides to the default configuration keys individually. If you don't define the key-value pair in your configuration file, the default value will be used. By default, Python Semantic Release will look for configuration overrides in ``pyproject.toml`` under the TOML table ``[tool.semantic_release]``. You may specify a different file using the ``-c/--config`` option, for example: .. 
code-block:: bash # In TOML format with top level table [semantic_release] semantic-release -c releaserc.toml # In JSON format with top level object key {"semantic_release": {}} semantic-release -c releaserc.json The easiest way to get started is to output the default configuration to a file, delete keys you do not need to override, and then edit the remaining keys to suit your project. To set up in ``pyproject.toml``, run the following command: .. code-block:: bash # In file redirect in bash semantic-release generate-config --pyproject >> pyproject.toml # Open your editor to edit the configuration vim pyproject.toml .. seealso:: - :ref:`cmd-generate-config` - :ref:`configuration` Configuring the Version Stamp Feature ''''''''''''''''''''''''''''''''''''' One of the best features of Python Semantic Release is the ability to automatically stamp the new version number into your project files, so you don't have to manually update the version upon each release. The version that is stamped is automatically determined by Python Semantic Release from your commit messages which compliments automated versioning seamlessly. The most crucial version stamp is the one in your project metadata, which is used by the Python Package Index (PyPI) and other package managers to identify the version of your package. For Python projects, this is typically the ``version`` field in your ``pyproject.toml`` file. First, set up your project metadata with the base ``version`` value. If you are starting with a brand new project, set ``project.version = "0.0.0"``. If you are working on an existing project, set it to the last version number you released. Do not include any ``v`` prefix. .. important:: The version number must be a valid SemVer version, which means it should follow the format ``MAJOR.MINOR.PATCH`` (e.g., ``1.0.0``). Python Semantic Release does NOT support Canonical version values defined in the `PEP 440`_ specification at this time. See `Issue #455 `_ for more details. 
Note that you can still define a SemVer version in the ``project.version`` field, and when your build is generated, the build tool will automatically generate a PEP 440 compliant version as long as you do **NOT** use a non-pep440 compliant pre-release token. .. _PEP 440: https://peps.python.org/pep-0440/ Your project metadata might look like this in ``pyproject.toml``:: [project] name = "my-package" version = "0.0.0" # Set this to the last released version or "0.0.0" for new projects description = "A sample Python package" To configure PSR to automatically update this version number, you need to specify the file and value to update in your configuration. Since ``pyproject.toml`` uses TOML format, you will add the replacement specification to the ``tool.semantic_release.version_toml`` list. Update the following configuration in your ``pyproject.toml`` file to include the version variable location: .. code-block:: toml [tool.semantic_release] version_toml = ["pyproject.toml:project.version"] # Alternatively, if you are using poetry's 'version' key, then you would use: version_toml = ["pyproject.toml:tool.poetry.version"] If you have other TOML files where you want to stamp the version, you can add them to the ``version_toml`` list as well. In the above example, there is an implicit assumption that you only want the version as the raw number format. If you want to specify the full tag value (e.g. v-prefixed version), then include ``:tf`` for "tag format" at the end of the version variable specification. For non-TOML formatted files (such as JSON or YAML files), you can use the :ref:`config-version_variables` configuration key instead. This feature uses an advanced Regular Expression to find and replace the version variable in the specified files. 
For Python files, it's much more effective to use ``importlib`` instead
Each commit parser has its own default configuration options so if you want to customize the parser behavior, you will need to specify the parser options you want to override. .. code-block:: toml [tool.semantic_release] commit_parser = "conventional" [tool.semantic_release.commit_parser_options] minor_tags = ["feat"] patch_tags = ["fix", "perf"] parse_squash_commits = true ignore_merge_commits = true Choosing your Changelog ''''''''''''''''''''''' Prior to creating a release, PSR will generate a changelog from the commit messages of your project. The changelog is extremely customizable from the format to the content of each section. PSR ships with a default changelog template that will be used if you do not provide custom templates. The default should be sufficient for most projects and has its own set of configuration options. For basic customization, you can choose either an traditional Markdown formatted changelog (default) or if you want to integrate with a Sphinx Documentation project, you can use the reStructuredText (RST) format. You can also choose the file name and location of where to write the default changelog. To set your changelog location and changelog format, add the following to your ``pyproject.toml`` file: .. code-block:: toml [tool.semantic_release.changelog.default_templates] changelog_file = "docs/source/CHANGELOG.rst" output_format = "rst" # or "md" for Markdown format Secondly, the more important aspect of configuring your changelog is to define Commit Exclusion Patterns or patterns that will be used to filter out commits from the changelog. PSR does **NOT** (yet) come with a built-in set of exclusion patterns, so you will need to define them yourself. These commit patterns should be in line with your project's commit parser configuration. To set commit exclusion patterns for a conventional commits parsers, add the following to your ``pyproject.toml`` file: .. 
code-block:: toml [tool.semantic_release.changelog] # Recommended patterns for conventional commits parser that is scope aware exclude_commit_patterns = [ '''chore(?:\([^)]*?\))?: .+''', '''ci(?:\([^)]*?\))?: .+''', '''refactor(?:\([^)]*?\))?: .+''', '''style(?:\([^)]*?\))?: .+''', '''test(?:\([^)]*?\))?: .+''', '''build\((?!deps\): .+)''', '''Initial [Cc]ommit.*''', ] .. seealso:: - :ref:`Changelog ` - Customize your changelog - :ref:`changelog.mode ` - Choose the changelog mode ('update' or 'init') - :ref:`changelog-templates-migrating-existing-changelog` Defining your Release Branches '''''''''''''''''''''''''''''' PSR provides a powerful feature to manage release types across multiple branches which can allow you to configure your project to have different release branches for different purposes, such as pre-release branches, beta branches, and your stable releases. .. note:: Most projects that do **NOT** publish pre-releases will be fine with PSR's built-in default. To define an alpha pre-release branch when you are working on a fix or new feature, you can add the following to your ``pyproject.toml`` file: .. code-block:: toml [tool.semantic_release.branches.alpha] # Matches branches with the prefixes 'feat/', 'fix/', or 'perf/'. match = "^(feat|fix|perf)/.+" prerelease = true prerelease_token = "alpha" Any time you execute ``semantic-release version`` on a branch with the prefix ``feat/``, ``fix/``, or ``perf/``, PSR will determine if a version bump is needed and if so, the resulting version will be a pre-release version with the ``alpha`` token. 
For example, +-----------+--------------+-----------------+-------------------+ | Branch | Version Bump | Current Version | Next Version | +===========+==============+=================+===================+ | main | Patch | ``1.0.0`` | ``1.0.1`` | +-----------+--------------+-----------------+-------------------+ | fix/bug-1 | Patch | ``1.0.0`` | ``1.0.1-alpha.1`` | +-----------+--------------+-----------------+-------------------+ .. seealso:: - :ref:`multibranch-releases` - Learn about multi-branch releases and how to configure them. Configuring VCS Releases '''''''''''''''''''''''' You can set up Python Semantic Release to create Releases in your remote version control system, so you can publish assets and release notes for your project. In order to do so, you will need to place an authentication token in the appropriate environment variable so that Python Semantic Release can authenticate with the remote VCS to push tags, create releases, or upload files. GitHub (``GH_TOKEN``) """"""""""""""""""""" For local publishing to GitHub, you should use a personal access token and store it in your environment variables. Specify the name of the environment variable in your configuration setting :ref:`remote.token `. The default is ``GH_TOKEN``. To generate a token go to https://github.com/settings/tokens and click on "Generate new token". For Personal Access Token (classic), you will need the ``repo`` scope to write (ie. push) to the repository. For fine-grained Personal Access Tokens, you will need the `contents`__ permission. __ https://docs.github.com/en/rest/authentication/permissions-required-for-fine-grained-personal-access-tokens#repository-permissions-for-contents GitLab (``GITLAB_TOKEN``) """"""""""""""""""""""""" A personal access token from GitLab. This is used for authenticating when pushing tags, publishing releases etc. This token should be stored in the ``GITLAB_TOKEN`` environment variable. 
Gitea (``GITEA_TOKEN``) """"""""""""""""""""""" A personal access token from Gitea. This token should be stored in the ``GITEA_TOKEN`` environment variable. Bitbucket (``BITBUCKET_TOKEN``) """"""""""""""""""""""""""""""" Bitbucket does not support uploading releases but can still benefit from automated tags and changelogs. The user has three options to push changes to the repository: #. Use SSH keys. #. Use an `App Secret`_, store the secret in the ``BITBUCKET_TOKEN`` environment variable and the username in ``BITBUCKET_USER``. #. Use an `Access Token`_ for the repository and store it in the ``BITBUCKET_TOKEN`` environment variable. .. _App Secret: https://support.atlassian.com/bitbucket-cloud/docs/push-back-to-your-repository/#App-secret .. _Access Token: https://support.atlassian.com/bitbucket-cloud/docs/repository-access-tokens .. seealso:: - :ref:`Changelog ` - customize your project's changelog. - :ref:`changelog-templates-custom_release_notes` - customize the published release notes - :ref:`version --vcs-release/--no-vcs-release ` - enable/disable VCS release creation. Testing your Configuration -------------------------- It's time to test your configuration! .. code-block:: bash # 1. Run the command in no-operation mode to see what would happen semantic-release -v --noop version # 2. If the output looks reasonable, try to run the command without any history changes # '-vv' will give you verbose debug output, which is useful for troubleshooting # commit parsing issues. semantic-release -vv version --no-commit --no-tag # 3. Evaluate your repository to see the changes that were made but not committed # - Check the version number in your pyproject.toml # - Check the distribution files from the build command # - Check the changelog file for the new release notes # 4. If everything looks good, make sure to commit/save your configuration changes git add pyproject.toml git commit -m "chore(config): configure Python Semantic Release" # 5. 
Now, try to run the release command with your history changes but without pushing semantic-release -v version --no-push --no-vcs-release # 6. Check the result on your local repository git status git log --graph --decorate --all # 7a. If you are happy with the release history and resulting commit & tag, # reverse your changes before trying the full release command. git tag -d v0.0.1 # replace with the actual version you released git reset --hard HEAD~1 # 7b. [Optional] Once you have configured a remote VCS token, try # running the full release command to update the remote repository. semantic-release version --push --vcs-release # This is optional as you may not want a personal access token set up or make # make the release permanent yet. .. seealso:: - :ref:`cmd-version` - :ref:`troubleshooting-verbosity` Configuring CI/CD ----------------- PSR is meant to help you release at speed! See our CI/CD Configuration guides under the :ref:`automatic` section. python-semantic-release-10.4.1/docs/concepts/index.rst000066400000000000000000000005421506116242600227500ustar00rootroot00000000000000.. _concepts: Concepts ======== This section covers the core concepts of Python Semantic Release and how it works. Understanding these concepts will help you effectively use Python Semantic Release in your projects. .. toctree:: :maxdepth: 1 getting_started commit_parsing changelog_templates multibranch_releases strict_mode python-semantic-release-10.4.1/docs/concepts/installation.rst000066400000000000000000000005041506116242600243400ustar00rootroot00000000000000.. _installation: Installation ============ .. code-block:: bash python3 -m pip install python-semantic-release semantic-release --help Python Semantic Release is also available from `conda-forge`_ or as a :ref:`GitHub Action `. .. 
_conda-forge: https://anaconda.org/conda-forge/python-semantic-release python-semantic-release-10.4.1/docs/concepts/multibranch_releases.rst000066400000000000000000000212441506116242600260360ustar00rootroot00000000000000.. _multibranch-releases: Multibranch Releases ==================== Python Semantic Release supports releases from multiple branches within your Git repository. You can elect to have a branch or set of branches create releases or `prereleases`_. There are no restrictions enforced on how you set up your releases, but be aware that if you create new releases from multiple branches, or prereleases from multiple independent branches using the same *prerelease token*, there is a chance that Python Semantic Release will calculate the next version to be the same on more than one branch (leading to an error that a Git tag already exists). .. note:: A "prerelease token" is the string used to suffix onto the 3-digit form of a full semantic version. For example, in the version ``1.2.3-beta.1``, the prerelease token is ``"beta"`` Typical strings used for pre-release tokens include "alpha", "beta", "dev" and "rc". These tend to indicate a level of maturity of the software associated with the version, but the specific meaning of each string is up to the project to decide. Generally, it's good practice to maintain a single branch from which full releases are made, and one branch at a time for each type of prerelease (alpha, beta, rc, etc). If you absolutely require tagging and (pre-)releases to take place from multiple branches where there's a risk that tags could conflict between branches, you can use the :ref:`--build-metadata ` command line argument to attach additional information (such as the branch name) to the tag in order to uniquely distinguish it from any other tags that might be calculated against other branches. Such a situation may occur in the following scenario: .. 
code-block:: O ----------- O <---- feature-1 / "feat: abc" / O -------- O --------------- O <---- main v1.0.0 v1.1.0 \ O ----------- O <---- feature-2 "feat: 123" Suppose that Python Semantic Release has been configured to use the same prerelease token ``"alpha"`` for all ``feature-*`` branches, and the default tag format ``"v{version}"``. In this case, running a pre-release from branch ``feature-1`` will recognize that since the last release, ``1.1.0``, a **feature** has been introduced and therefore the next tag to be applied to ``feature-1`` will be ``v1.2.0-alpha.1``. However, suppose we then try to run a release against ``feature-2``. This will also recognize that a **feature** has been introduced against the last released version of ``v1.1.0`` and therefore will try to create the tag ``v1.2.0-alpha.1``, leading to an error as this tag was already created against ``feature-1``. To get around this issue, you can pass the branch name as part of the build metadata: .. code-block:: shell semantic-release version --build-metadata $(git branch --show-current) This would lead to the tag ``v1.2.0-alpha.1+feature-1`` and ``v1.2.0-alpha.1+feature-2`` being applied to branches ``feature-1`` and ``feature-2``, respectively. Note that "`build metadata MUST be ignored`_" per the semver specification when comparing two versions, so these two prereleases would be considered equivalent semantic versions, but when merged to the branch configured to produce full releases (``main``), if released separately the changes from each branch would be released in two versions that would be considered different according to the semver specification. .. note:: If you have tags in your Git repository that are not valid semantic versions (which have then been formatted into your :ref:`tag_format `), these tags will be ignored for the purposes of calculating the next version. .. _prereleases: https://semver.org/#spec-item-9 .. 
_build metadata MUST be ignored: https://semver.org/#spec-item-10 .. _multibranch-releases-configuring: Configuring Multibranch Releases -------------------------------- Within your configuration file, you can create one or more groups of branches (*"release groups"*) that produce a certain type of release. Options are configured at the group level, and the group to use is chosen based on the *current branch name* against which Python Semantic Release is running. Each release group is configured as a nested mapping under the ``tool.semantic_release.branches`` key in ``pyproject.toml``, or the equivalent structure in other formats. the mapping requires a single key that is used as a name for the release group, which can help to identify it in log messages but has no effect on the behavior of the release. For example, Python Semantic Release has only one release group by default with the name ``main``. Inside each release group, the following key-value pairs can be set: +----------------------+----------+-----------+--------------------------------------------------------+ | Key | Required | Default | Description | +----------------------+----------+-----------+--------------------------------------------------------+ | match | Yes | N/A | A `Python regular expression`_ to match against the | | | | | active branch's name. If the branch name matches the | | | | | provided regular expression, then this release group | | | | | is chosen to provide the other configuration settings | | | | | available. 
| +----------------------+----------+-----------+--------------------------------------------------------+ | prerelease | No | ``false`` | Whether or not branches in this release group should | | | | | a prerelease instead of a full release | +----------------------+----------+-----------+--------------------------------------------------------+ | prerelease_token | No | ``rc`` | If creating a prerelease, specify the string to be | | | | | used as a prerelease token in any new versions created | | | | | against this branch. | +----------------------+----------+-----------+--------------------------------------------------------+ .. _Python regular expression: https://docs.python.org/3/library/re.html .. warning:: If two release groups have overlapping "match" patterns, i.e. a the name of a branch could theoretically match both patterns, then the release group which is defined first in your configuration file is used. Because of this, it's recommended that you place release groups with more specific match patterns higher up in your configuration file than those with patterns that would match a broader range of branch names. For example, suppose a project currently on version ``1.22.4`` is working on a new major version. The project wants to create a branch called ``2.x.x`` against which they will develop the new major version, and they would like to create "release candidate" ("rc") prereleases from this branch. There are also a number of new features to integrate, and the project has agreed that all such branches should be named according to the convention ``next-{developer initials}-{issue number}``, leading to branches named similarly to ``next-bc-prj-123``. The project would like to release with tags that include some way to identify the branch and date on which the release was made from the tag. This project would be able to leverage the following configuration to achieve the above requirements from their release configuration: .. 
code-block:: toml [tool.semantic_release.branches.main] match = "(main|master)" prerelease = false [tool.semantic_release.branches."2.x.x"] match = "2.x.x" prerelease = true prerelease_token = "rc" [tool.semantic_release.branches."2.x.x New Features"] match = "next-\\w+-prj-\\d+" prerelease = true prerelease_token = "alpha" In a CI pipeline, the following command would allow attaching the date and branch name to the versions that are produced (note this example uses the UNIX ``date`` command): .. code-block:: bash semantic-release version \ --build-metadata "$(git branch --show-current).$(date +%Y%m%d)" This would lead to versions such as ``1.1.1+main.20221127`` or ``2.0.0-rc.4+2.x.x.20221201``. .. note:: Remember that is always possible to override the release rules configured by using the :ref:`cmd-version-option-force-level` and :ref:`cmd-version-option-as-prerelease` flags. python-semantic-release-10.4.1/docs/concepts/strict_mode.rst000066400000000000000000000045421506116242600241610ustar00rootroot00000000000000.. _strict-mode: Strict Mode =========== Strict Mode is enabled by use of the :ref:`strict ` parameter to the main command for Python Semantic Release. Strict Mode alters the behavior of Python Semantic Release when certain conditions are encountered that prevent Python Semantic Release from performing an action. Typically, this will result in a warning becoming an error, or a different exit code (0 vs non-zero) being produced when Python Semantic Release exits early. For example: .. code-block:: bash #!/usr/bin/bash set -euo pipefail git checkout $NOT_A_RELEASE_BRANCH pip install \ black \ isort \ twine \ pytest \ python-semantic-release isort . # sort imports black . 
# format the code pytest # test the code semantic-release --strict version # ERROR - not a release branch twine upload dist/* # publish the code Using Strict Mode with the ``--strict`` flag ensures this simple pipeline will fail while running ``semantic-release``, as the non-zero exit code will cause it to stop when combined with the ``-e`` option. Without Strict Mode, the ``semantic-release`` command will exit with code 0, causing the above pipeline to continue. The specific effects of enabling Strict Mode are detailed below. .. _strict-mode-not-a-release-branch: Non-Release Branches ~~~~~~~~~~~~~~~~~~~~ When running in Strict Mode, invoking Python Semantic Release on a non-Release branch will cause an error with a non-zero exit code. This means that you can prevent an automated script from running further against branches you do not want to release from, for example in multibranch CI pipelines. Running without Strict Mode will allow subsequent steps in the pipeline to also execute, but be aware that certain actions that Python Semantic Release may perform for you will likely not have been carried out, such as writing to files or creating a git commit in your repository. .. seealso:: - :ref:`multibranch-releases` .. _strict-mode-version-already-released: Version Already Released/No Release To Be Made ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When Strict Mode is not enabled and Python Semantic Release identifies that no release needs to be made, it will exit with code 0. You can cause Python Semantic Release to raise an error if no release needs to be made by enabling Strict Mode. 
python-semantic-release-10.4.1/docs/conf.py000066400000000000000000000046551506116242600206010ustar00rootroot00000000000000import os import sys from datetime import datetime, timezone sys.path.insert(0, os.path.abspath(".")) sys.path.insert(0, os.path.abspath("..")) import semantic_release # noqa: E402 author_name = "Python Semantic Release Team" # -- General configuration ------------------------------------------------ extensions = [ "sphinx.ext.autodoc", "sphinx.ext.doctest", "sphinx.ext.coverage", "sphinx.ext.viewcode", "sphinxcontrib.apidoc", ] autodoc_default_options = {"ignore-module-all": True} templates_path = ["_templates"] source_suffix = ".rst" master_doc = "index" project = "python-semantic-release" current_year = datetime.now(timezone.utc).astimezone().year copyright = f"{current_year}, {author_name}" # noqa: A001 version = semantic_release.__version__ release = semantic_release.__version__ exclude_patterns = ["_build"] pygments_style = "sphinx" html_theme = "furo" htmlhelp_basename = "python-semantic-releasedoc" # -- Automatically run sphinx-apidoc -------------------------------------- docs_path = os.path.dirname(__file__) apidoc_output_dir = os.path.join(docs_path, "api", "modules") apidoc_module_dir = os.path.join(docs_path, "..", "src") apidoc_separate_modules = True apidoc_module_first = True apidoc_extra_args = ["-d", "3"] def setup(app): # type: ignore[no-untyped-def] # noqa: ARG001,ANN001,ANN201 pass # -- Options for LaTeX output --------------------------------------------- latex_documents = [ ( "index", "python-semantic-release.tex", "python-semantic-release Documentation", author_name, "manual", ), ] # -- Options for manual page output --------------------------------------- man_pages = [ ( "index", "python-semantic-release", "python-semantic-release Documentation", [author_name], 1, ) ] # -- Options for Texinfo output ------------------------------------------- texinfo_documents = [ ( "index", "python-semantic-release", 
"python-semantic-release Documentation", author_name, "python-semantic-release", "One line description of project.", "Miscellaneous", ), ] # -- Options for Epub output ---------------------------------------------- # Bibliographic Dublin Core info. epub_title = "python-semantic-release" epub_author = author_name epub_publisher = author_name epub_copyright = copyright epub_exclude_files = ["search.html"] python-semantic-release-10.4.1/docs/configuration/000077500000000000000000000000001506116242600221375ustar00rootroot00000000000000python-semantic-release-10.4.1/docs/configuration/automatic-releases/000077500000000000000000000000001506116242600257265ustar00rootroot00000000000000python-semantic-release-10.4.1/docs/configuration/automatic-releases/cronjobs.rst000066400000000000000000000024001506116242600302730ustar00rootroot00000000000000.. _cronjobs: Cron Job Publishing =================== This is for you if for some reason you cannot publish from your CI or you would like releases to drop at a certain interval. Before you start, answer this: Are you sure you do not want a CI to release for you? (high version numbers are not a bad thing). The guide below is for setting up scheduled publishing on a server. It requires that the user that runs the cronjob has push access to the repository and upload access to an artifact repository. 1. Create a virtualenv:: virtualenv semantic_release -p `which python3` 2. Install python-semantic-release:: pip install python-semantic-release 3. Clone the repositories you want to have scheduled publishing. 3. Put the following in ``publish``:: VENV=semantic_release/bin $VENV/pip install -U pip python-semantic-release > /dev/null publish() { cd $1 git stash -u # ensures that there is no untracked files in the directory git fetch && git reset --hard origin/master $VENV/semantic-release version && $VENV/semantic-release publish cd .. } publish publish 4. 
Add cronjob:: /bin/bash -c "cd && source semantic_release/bin/activate && ./publish 2>&1 >> releases.log" python-semantic-release-10.4.1/docs/configuration/automatic-releases/github-actions.rst000066400000000000000000001053231506116242600314040ustar00rootroot00000000000000.. _gh_actions: GitHub Actions ============== There are two official GitHub Actions for Python Semantic Release: 1. :ref:`python-semantic-release/python-semantic-release@TAG ` This is the main action that runs the :ref:`version ` CLI command. It is used to (1) determine the next version number, (2) stamp the version number, (3) run the build command, (4) build the changelog, (5) commit the changes, (6) tag the commit, (7) publish the commit & tag and lastly (8) create a GitHub release. For more information review the :ref:`version command documentation ` and see :ref:`below ` for the Action configuration options. 2. :ref:`python-semantic-release/publish-action@TAG ` This action is used to execute the :ref:`publish ` CLI command. It is used to upload files, such as distribution artifacts and other assets, to a GitHub release. Included in this documentation are some recommended examples below if you want to get started quickly. These examples are not exhaustive and you will need to adjust them for your specific project needs especially if you are using a monorepo. - :ref:`GitHub Actions Example Workflows ` - :ref:`GitHub Actions with Monorepos ` .. note:: These GitHub Actions are only simplified wrappers around the python-semantic-release CLI. Ultimately, they download and install the published package from PyPI so if you find that you are trying to do something more advanced or less common, you may need to install and use the CLI directly. .. 
_gh_actions-psr: Python Semantic Release Action '''''''''''''''''''''''''''''' The official `Python Semantic Release GitHub Action`_ is a `GitHub Docker Action`_, which means at the beginning of the job it will build a Docker image that contains the Python Semantic Release package and its dependencies. It will then run the job step inside the Docker Container. This is done to ensure that the environment is consistent across all GitHub Runners regardless of platform. With this choice, comes some limitations of non-configurable options like a pre-defined python version, lack of installed build tools, and an inability to utilize caching. The primary benefit of using the GitHub Action is that it is easy to set up and use for most projects. We handle a lot of the git configuration under the hood, so you don't have to handle it yourself. There are a plenty of customization options available which are detailed individually below. Most importantly your project's configuration file will be used as normal, as your project will be mounted into the container for the action to use. .. _Python Semantic Release GitHub Action: https://github.com/marketplace/actions/python-semantic-release .. _GitHub Docker Action: https://docs.github.com/en/actions/sharing-automations/creating-actions/creating-a-docker-container-action .. seealso:: `action.yml`__: the code definition of the action __ https://github.com/python-semantic-release/python-semantic-release/blob/master/action.yml .. _gh_actions-psr-inputs: Inputs ------ GitHub Action inputs are used for select configuration and provide the necessary information to execute the action. The inputs are passed to the action using the ``with`` keyword in the workflow file. Many inputs will mirror the command line options available in the :ref:`version ` command. This section outlines each supported input and its purpose. ---- .. 
_gh_actions-psr-inputs-build: ``build`` """"""""" **Type:** ``Literal["true", "false"]`` Override whether the action should execute the build command or not. This option is equivalent to adding the command line switch ``--skip-build`` (when ``false``) to the :ref:`version ` command. If set to ``true``, no command line switch is passed and the default behavior of the :ref:`version ` is used. **Required:** ``false`` .. note:: If not set or set to ``true``, the default behavior is defined by the :ref:`version ` command and any user :ref:`configurations `. .. seealso:: - :ref:`cmd-version-option-skip_build` option for the :ref:`version ` command. ---- .. _gh_actions-psr-inputs-build_metadata: ``build_metadata`` """""""""""""""""" **Type:** ``string`` Explicitly set the build metadata of the version. This is equivalent to running the command: .. code:: shell semantic-release version --build-metadata **Required:** ``false`` .. seealso:: - :ref:`cmd-version-option-build-metadata` option for the :ref:`version ` command ---- .. _gh_actions-psr-inputs-changelog: ``changelog`` """"""""""""" **Type:** ``Literal["true", "false"]`` Override whether the action should generate a changelog or not. This option is equivalent to adding either ``--changelog`` (on ``true``) or ``--no-changelog`` (on ``false``) to the :ref:`version ` command. **Required:** ``false`` .. note:: If not set, the default behavior is defined by the :ref:`version ` command and any user :ref:`configurations `. .. seealso:: - :ref:`cmd-version-option-changelog` options for the :ref:`version ` command ---- .. _gh_actions-psr-inputs-commit: ``commit`` """""""""" **Type:** ``Literal["true", "false"]`` Override whether the action should commit any changes to the local repository. Changes include the version stamps, changelog, and any other files that are modified and added to the index during the build command. 
This option is equivalent to adding either ``--commit`` (on ``true``) or ``--no-commit`` (on ``false``) to the :ref:`version ` command. **Required:** ``false`` .. note:: If not set, the default behavior is defined by the :ref:`version ` command and any user :ref:`configurations `. .. seealso:: - :ref:`cmd-version-option-commit` options for the :ref:`version ` command ---- .. _gh_actions-psr-inputs-config_file: ``config_file`` """"""""""""""" Path to a custom semantic-release configuration file. By default, an empty string will look for to the ``pyproject.toml`` file in the current directory. This is the same as passing the ``-c`` or ``--config`` parameter to semantic-release. **Required:** ``false`` **Default:** ``""`` .. seealso:: - :ref:`cmd-main-option-config` for the :ref:`semantic-release ` command ---- .. _gh_actions-psr-inputs-directory: ``directory`` """"""""""""" If the project is not at the root of the repository (like in monorepos), you can specify a sub-directory to change into before running semantic-release. **Required:** ``false`` **Default:** ``.`` ---- .. _gh_actions-psr-inputs-force: ``force`` """"""""" **Type:** ``Literal["prerelease", "patch", "minor", "major"]`` Force the next version to be a specific bump type. This is equivalent to running the command: .. code:: shell semantic-release version -- # Ex: force a patch level version bump semantic-release version --patch **Required:** ``false`` .. seealso:: - :ref:`cmd-version-option-force-level` options for the :ref:`version ` command ---- .. _gh_actions-psr-inputs-git_committer_email: ``git_committer_email`` """"""""""""""""""""""" The email of the account used to commit. If customized, it must be associated with the provided token. **Required:** ``false`` ---- .. _gh_actions-psr-inputs-git_committer_name: ``git_committer_name`` """""""""""""""""""""" The name of the account used to commit. If customized, it must be associated with the provided token. **Required:** ``false`` ---- .. 
_gh_actions-psr-inputs-github_token: ``github_token`` """""""""""""""" The GitHub Token is essential for access to your GitHub repository to allow the push of commits & tags as well as to create a release. Not only do you need to provide the token as an input but you also need to ensure that the token has the correct permissions. The token should have the following `permissions`_: * id-token: write * contents: write **Required:** ``true`` .. _permissions: https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idpermissions ---- .. _gh_actions-psr-inputs-noop: ``no_operation_mode`` """"""""""""""""""""" If set to true, the github action will pass the ``--noop`` parameter to semantic-release. This will cause semantic-release to run in "no operation" mode. This is useful for testing the action without making any permanent changes to the repository. **Required:** ``false`` **Default:** ``false`` .. seealso:: - :ref:`cmd-main-option-noop` option for the :ref:`semantic-release ` command ---- .. _gh_actions-psr-inputs-prerelease: ``prerelease`` """""""""""""" Force the version to be a prerelease version when set to ``true``. This is equivalent to running the command: .. code:: shell semantic-release version --as-prerelease **Required:** ``false`` .. note:: If not set, the default behavior is defined by the :ref:`version ` command and any user :ref:`configurations `. .. seealso:: - :ref:`cmd-version-option-as-prerelease` option for the :ref:`version ` command ---- .. _gh_actions-psr-inputs-prerelease_token: ``prerelease_token`` """""""""""""""""""" Override any prerelease token in the configuration file with this value, if it is a pre-release. This will override the matching release branch configuration's ``prerelease_token`` value. If you always want it to be a prerelease then you must also set the :ref:`gh_actions-psr-inputs-prerelease` input to ``true``. This option is equivalent to running the command: .. 
code:: shell semantic-release version --prerelease-token **Required:** ``false`` .. note:: If not set, the default behavior is defined by the :ref:`version ` command and any user :ref:`configurations `. .. seealso:: - :ref:`cmd-version-option-prerelease-token` option for the :ref:`version ` command ---- .. _gh_actions-psr-inputs-push: ``push`` """""""" **Type:** ``Literal["true", "false"]`` Override whether the action should push any commits or tags from the local repository to the remote repository. This option is equivalent to adding either ``--push`` (on ``true``) or ``--no-push`` (on ``false``) to the :ref:`version ` command. **Required:** ``false`` .. note:: If not set, the default behavior is defined by the :ref:`version ` command and any user :ref:`configurations `. .. seealso:: - :ref:`cmd-version-option-push` options for the :ref:`version ` command ---- .. _gh_actions-psr-inputs-root_options: ``root_options`` """""""""""""""" .. important:: This option has been removed in v10.0.0 and newer because of a command injection vulnerability. Please update as to v10.0.0 as soon as possible. See :ref:`Upgrading to v10 ` for more information. Additional options for the main ``semantic-release`` command, which will come before the :ref:`version ` subcommand. **Example** .. code:: yaml - uses: python-semantic-release/python-semantic-release@v9 with: root_options: "-vv --noop" This configuration would cause the command to be ``semantic-release -vv --noop version``, which would run the version command verbosely but in no-operation mode. **Required:** ``false`` **Default:** ``-v`` .. seealso:: - :ref:`Options ` for the :ref:`semantic-release ` command ---- .. _gh_actions-psr-inputs-ssh_public_signing_key: ``ssh_public_signing_key`` """""""""""""""""""""""""" The public key associated with the private key used in signing a commit and tag. **Required:** ``false`` ---- .. 
_gh_actions-psr-inputs-ssh_private_signing_key: ``ssh_private_signing_key`` """"""""""""""""""""""""""" The private key used to sign a commit and tag. **Required:** ``false`` ---- .. _gh_actions-psr-inputs-strict: ``strict`` """""""""" If set to true, the github action will pass the `--strict` parameter to ``semantic-release``. .. seealso:: - :ref:`cmd-main-option-strict` option for the :ref:`semantic-release ` command ---- .. _gh_actions-psr-inputs-tag: ``tag`` """"""" **Type:** ``Literal["true", "false"]`` Override whether the action should create a version tag in the local repository. This option is equivalent to adding either ``--tag`` (on ``true``) or ``--no-tag`` (on ``false``) to the :ref:`version ` command. **Required:** ``false`` .. note:: If not set, the default behavior is defined by the :ref:`version ` command and any user :ref:`configurations `. .. seealso:: - :ref:`cmd-version-option-tag` options for the :ref:`version ` command ---- .. _gh_actions-psr-inputs-vcs_release: ``vcs_release`` """"""""""""""" **Type:** ``Literal["true", "false"]`` Override whether the action should create a release on the VCS. This option is equivalent to adding either ``--vcs-release`` (on ``true``) or ``--no-vcs-release`` (on ``false``) to the :ref:`version ` command. **Required:** ``false`` .. note:: If not set, the default behavior is defined by the :ref:`version ` command and any user :ref:`configurations `. .. seealso:: - :ref:`cmd-version-option-vcs-release` options for the :ref:`version ` command ---- .. _gh_actions-psr-inputs-verbosity: ``verbosity`` """"""""""""" Set the verbosity level of the output as the number of ``-v``'s to pass to ``semantic-release``. 0 is no extra output, 1 is info level output, 2 is debug output, and 3 is a silly amount of debug output. **Required:** ``false`` **Default:** ``"1"`` .. seealso:: - :ref:`cmd-main-option-verbosity` for the :ref:`semantic-release ` command ---- .. 
_gh_actions-psr-outputs: Outputs ------- The Python Semantic Release Action also provides outputs that can be used in subsequent steps of the workflow. These outputs are used to provide information about the release and any actions that were taken. ---- .. _gh_actions-psr-outputs-commit_sha: ``commit_sha`` """""""""""""" **Type:** ``string`` The commit SHA of the release if a release was made, otherwise an empty string. Example upon release: ``d4c3b2a1e0f9c8b7a6e5d4c3b2a1e0f9c8b7a6e5`` Example when no release was made: ``""`` ---- .. _gh_actions-psr-outputs-is_prerelease: ``is_prerelease`` """"""""""""""""" **Type:** ``Literal["true", "false"]`` A boolean value indicating whether the released version is a prerelease. ---- .. _gh_actions-psr-outputs-link: ``link`` """""""" **Type:** ``string`` The URL link to the release if a release was made, otherwise an empty string. Example upon release: ``https://github.com/user/repo/releases/tag/v1.2.3`` Example when no release was made: ``""`` ---- .. _gh_actions-psr-outputs-previous_version: ``previous_version`` """""""""""""""""""" **Type:** ``string`` The previous version before the release, if a release was or will be made. If no release is detected, this will be the current version or an empty string if no previous version exists. ---- .. _gh_actions-psr-outputs-released: ``released`` """""""""""" **Type:** ``Literal["true", "false"]`` A boolean value indicating whether a release was made. ---- .. _gh_actions-psr-outputs-release_notes: ``release_notes`` """"""""""""""""""" **Type:** ``string`` The release notes generated by the release, if any. If no release was made, this will be an empty string. ---- .. _gh_actions-psr-outputs-version: ``version`` """"""""""" **Type:** ``string`` The newly released SemVer version string if one was made, otherwise the current version. Example: ``1.2.3`` ---- .. 
_gh_actions-psr-outputs-tag: ``tag`` """"""" **Type:** ``string`` The Git tag corresponding to the ``version`` output but in the tag format dictated by your configuration. Example: ``v1.2.3`` ---- .. _gh_actions-publish: Python Semantic Release Publish Action '''''''''''''''''''''''''''''''''''''' The official `Python Semantic Release Publish Action`_ is a `GitHub Docker Action`_, which means at the beginning of the job it will build a Docker image that contains the Python Semantic Release package and its dependencies. It will then run the job step inside the Docker Container. This is done to ensure that the environment is consistent across all GitHub Runners regardless of platform. With this choice, comes some limitations of non-configurable options like a pre-defined python version, lack of additional 3rd party tools, and an inability to utilize caching. The primary benefit of using the GitHub Action is that it is easy to set up and use for most projects. We handle some additional configuration under the hood, so you don't have to handle it yourself. We do however provide a few customization options which are detailed individually below. Most importantly your project's configuration file will be used as normal, as your project will be mounted into the container for the action to use. If you have issues with the action, please open an issue on the `python-semantic-release/publish-action`_ repository. .. _Python Semantic Release Publish Action: https://github.com/marketplace/actions/python-semantic-release-publish .. seealso:: - `action.yml`__: the code definition for the publish action __ https://github.com/python-semantic-release/publish-action/blob/main/action.yml .. _gh_actions-publish-inputs: Inputs ------ GitHub Action inputs are used for select configuration and provide the necessary information to execute the action. The inputs are passed to the action using the ``with`` keyword in the workflow file. 
Many inputs will mirror the command line options available in the :ref:`publish ` command and others will be specific to adjustment of the action environment. This section outlines each supported input and its purpose. ---- .. _gh_actions-publish-inputs-config_file: ``config_file`` """"""""""""""" Path to a custom semantic-release configuration file. By default, an empty string will look for to the ``pyproject.toml`` file in the current directory. This is the same as passing the ``-c`` or ``--config`` parameter to semantic-release. **Required:** ``false`` **Default:** ``""`` .. seealso:: - :ref:`cmd-main-option-config` for the :ref:`semantic-release ` command ---- .. _gh_actions-publish-inputs-directory: ``directory`` """"""""""""" If the project is not at the root of the repository (like in monorepos), you can specify a sub-directory to change into before running semantic-release. **Required:** ``false`` **Default:** ``.`` ---- .. _gh_actions-publish-inputs-github_token: ``github_token`` """""""""""""""" The GitHub Token is essential for access to your GitHub repository to allow the publish of assets to a release. Not only do you need to provide the token as an input but you also need to ensure that the token has the correct permissions. The token should have the following `permissions`_: * ``contents: write``: Required for modifying a GitHub Release **Required:** ``true`` .. _permissions: https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idpermissions ---- .. _gh_actions-publish-inputs-noop: ``no_operation_mode`` """"""""""""""""""""" If set to true, the github action will pass the ``--noop`` parameter to semantic-release. This will cause semantic-release to run in "no operation" mode. This is useful for testing the action without actually publishing anything. **Required:** ``false`` **Default:** ``false`` .. seealso:: - :ref:`cmd-main-option-noop` option for the :ref:`semantic-release ` command ---- .. 
_gh_actions-publish-inputs-root_options: ``root_options`` """""""""""""""" .. important:: This option has been removed in v10.0.0 and newer because of a command injection vulnerability. Please update as to v10.0.0 as soon as possible. See :ref:`Upgrading to v10 ` for more information. Additional options for the main ``semantic-release`` command, which will come before the :ref:`publish ` subcommand. **Example** .. code:: yaml - uses: python-semantic-release/publish-action@v9 with: root_options: "-vv --noop" This configuration would cause the command to be ``semantic-release -vv --noop publish``, which would run the publish command verbosely but in no-operation mode. **Required:** ``false`` **Default:** ``-v`` .. seealso:: - :ref:`Options ` for the :ref:`semantic-release ` command ---- .. _gh_actions-publish-inputs-tag: ``tag`` """"""" **Type:** ``string`` The tag corresponding to the GitHub Release that the artifacts should be published to. This option is equivalent to running the command: .. code:: shell semantic-release publish --tag Python Semantic Release will automatically determine the latest release if no ``--tag`` option is provided. **Required:** ``false`` .. seealso:: - :ref:`cmd-publish-option-tag` option for the :ref:`publish ` command ---- .. _gh_actions-publish-inputs-verbosity: ``verbosity`` """"""""""""" Set the verbosity level of the output as the number of ``-v``'s to pass to ``semantic-release``. 0 is no extra output, 1 is info level output, 2 is debug output, and 3 is a silly amount of debug output. **Required:** ``false`` **Default:** ``"1"`` .. seealso:: - :ref:`cmd-main-option-verbosity` for the :ref:`semantic-release ` command ---- .. _gh_actions-publish-outputs: Outputs ------- There are no outputs provided by the Python Semantic Release Publish Action at this time. .. note:: If you would like outputs to be provided by this action, please open an issue on the `python-semantic-release/publish-action`_ repository. .. 
_python-semantic-release/publish-action: https://github.com/python-semantic-release/publish-action/issues ---- .. _gh_actions-examples: Examples '''''''' Common Workflow Example ----------------------- The following is a simple common workflow example that uses both the Python Semantic Release Action and the Python Semantic Release Publish Action. This workflow will run on every push to the ``main`` branch and will create a new release upon a successful version determination. If a version is released, the workflow will then publish the package to PyPI and upload the package to the GitHub Release Assets as well. .. code:: yaml name: Continuous Delivery on: push: branches: - main # default: least privileged permissions across all jobs permissions: contents: read jobs: release: runs-on: ubuntu-latest concurrency: group: ${{ github.workflow }}-release-${{ github.ref_name }} cancel-in-progress: false permissions: contents: write steps: # Note: We checkout the repository at the branch that triggered the workflow # with the entire history to ensure to match PSR's release branch detection # and history evaluation. # However, we forcefully reset the branch to the workflow sha because it is # possible that the branch was updated while the workflow was running. This # prevents accidentally releasing un-evaluated changes. - name: Setup | Checkout Repository on Release Branch uses: actions/checkout@v4 with: ref: ${{ github.ref_name }} fetch-depth: 0 - name: Setup | Force release branch to be at workflow sha run: | git reset --hard ${{ github.sha }} - name: Evaluate | Verify upstream has NOT changed # Last chance to abort before causing an error as another PR/push was applied to # the upstream branch while this workflow was running. This is important # because we are committing a version change (--commit). You may omit this step # if you have 'commit: false' in your configuration. 
# # You may consider moving this to a repo script and call it from this step instead # of writing it in-line. shell: bash run: | set +o pipefail UPSTREAM_BRANCH_NAME="$(git status -sb | head -n 1 | awk -F '\\.\\.\\.' '{print $2}' | cut -d ' ' -f1)" printf '%s\n' "Upstream branch name: $UPSTREAM_BRANCH_NAME" set -o pipefail if [ -z "$UPSTREAM_BRANCH_NAME" ]; then printf >&2 '%s\n' "::error::Unable to determine upstream branch name!" exit 1 fi git fetch "${UPSTREAM_BRANCH_NAME%%/*}" if ! UPSTREAM_SHA="$(git rev-parse "$UPSTREAM_BRANCH_NAME")"; then printf >&2 '%s\n' "::error::Unable to determine upstream branch sha!" exit 1 fi HEAD_SHA="$(git rev-parse HEAD)" if [ "$HEAD_SHA" != "$UPSTREAM_SHA" ]; then printf >&2 '%s\n' "[HEAD SHA] $HEAD_SHA != $UPSTREAM_SHA [UPSTREAM SHA]" printf >&2 '%s\n' "::error::Upstream has changed, aborting release..." exit 1 fi printf '%s\n' "Verified upstream branch has not changed, continuing with release..." - name: Action | Semantic Version Release id: release # Adjust tag with desired version if applicable. uses: python-semantic-release/python-semantic-release@v10.4.1 with: github_token: ${{ secrets.GITHUB_TOKEN }} git_committer_name: "github-actions" git_committer_email: "actions@users.noreply.github.com" - name: Publish | Upload to GitHub Release Assets uses: python-semantic-release/publish-action@v10.4.1 if: steps.release.outputs.released == 'true' with: github_token: ${{ secrets.GITHUB_TOKEN }} tag: ${{ steps.release.outputs.tag }} - name: Upload | Distribution Artifacts uses: actions/upload-artifact@v4 with: name: distribution-artifacts path: dist if-no-files-found: error deploy: # 1. Separate out the deploy step from the publish step to run each step at # the least amount of token privilege # 2. Also, deployments can fail, and its better to have a separate job if you need to retry # and it won't require reversing the release. 
runs-on: ubuntu-latest needs: release if: ${{ needs.release.outputs.released == 'true' }} permissions: contents: read id-token: write steps: - name: Setup | Download Build Artifacts uses: actions/download-artifact@v4 id: artifact-download with: name: distribution-artifacts path: dist # ------------------------------------------------------------------- # # Python Semantic Release is not responsible for publishing your # # python artifacts to PyPI. Use the official PyPA publish action # # instead. The following steps are an example but is not guaranteed # # to work as the action is not maintained by the # # python-semantic-release team. # # ------------------------------------------------------------------- # # see https://docs.pypi.org/trusted-publishers/ - name: Publish package distributions to PyPI uses: pypa/gh-action-pypi-publish@@SHA1_HASH # vX.X.X with: packages-dir: dist print-hash: true verbose: true .. important:: The `concurrency`_ directive is used on the job to prevent race conditions of more than one release job in the case if there are multiple pushes to ``main`` in a short period of time. Secondly the *Evaluate | Verify upstream has NOT changed* step is used to ensure that the upstream branch has not changed while the workflow was running. This is important because we are committing a version change (``commit: true``) and there might be a push collision that would cause undesired behavior. Review Issue `#1201`_ for more detailed information. .. warning:: You must set ``fetch-depth`` to 0 when using ``actions/checkout@v4``, since Python Semantic Release needs access to the full history to build a changelog and at least the latest tags to determine the next version. .. warning:: The ``GITHUB_TOKEN`` secret is automatically configured by GitHub, with the same permissions role as the user who triggered the workflow run. This causes a problem if your default branch is protected to specific users. 
You can work around this by storing an administrator's Personal Access Token as a separate secret and using that instead of ``GITHUB_TOKEN``. In this case, you will also need to pass the new token to ``actions/checkout`` (as the ``token`` input) in order to gain push access. .. _#1201: https://github.com/python-semantic-release/python-semantic-release/issues/1201 .. _concurrency: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idconcurrency Version Overrides Example ------------------------- In the case where you want to provide multiple command line options to the :ref:`version ` command, you provide them through the ``with`` directive in the workflow file. In this example, we want to force a patch version bump, not produce a changelog, and provide specialized build metadata. As a regular CLI command, this would look like: .. code:: shell semantic-release version --patch --no-changelog --build-metadata abc123 The equivalent GitHub Action configuration would be: .. code:: yaml # snippet - name: Action | Semantic Version Release # Adjust tag with desired version if applicable. uses: python-semantic-release/python-semantic-release@v10.4.1 with: github_token: ${{ secrets.GITHUB_TOKEN }} force: patch changelog: false build_metadata: abc123 .. seealso:: - `Publish Action Manual Release Workflow`_: To maintain the Publish Action at the same version as Python Semantic Release, we use a Manual release workflow which forces the matching bump type as the root project. Check out this workflow to see how you can manually provide input that triggers the desired version bump. .. _Publish Action Manual Release Workflow: https://github.com/python-semantic-release/publish-action/blob/main/.github/workflows/release.yml .. 
_gh_actions-monorepo: Actions with Monorepos '''''''''''''''''''''' While ``python-semantic-release`` does **NOT** have full monorepo support, if you have multiple projects stored within a single repository (or your project is not at the root of the repository), you can pass the :ref:`directory ` input to the action to change directory before semantic-release execution. For multiple packages, you would need to run the action multiple times, to release each project. The following example demonstrates how to release two projects in a monorepo. Remember that for each release of each submodule you will then need to handle publishing each package separately as well. This is dependent on the result of your build commands. In the example below, we assume a simple ``build`` module command to build a ``sdist`` and wheel artifacts into the submodule's ``dist`` directory. The ``directory`` input directive is also available for the Python Semantic Release Publish Action. .. code:: yaml jobs: release: env: SUBMODULE_1_DIR: project1 SUBMODULE_2_DIR: project2 steps: # ------------------------------------------------------------------- # # Note the use of different IDs to distinguish which submodule was # # identified to be released. The subsequent actions then reference # # their specific release ID to determine if a release occurred. 
# # ------------------------------------------------------------------- # - name: Release submodule 1 id: release-submod-1 uses: python-semantic-release/python-semantic-release@v10.4.1 with: directory: ${{ env.SUBMODULE_1_DIR }} github_token: ${{ secrets.GITHUB_TOKEN }} - name: Release submodule 2 id: release-submod-2 uses: python-semantic-release/python-semantic-release@v10.4.1 with: directory: ${{ env.SUBMODULE_2_DIR }} github_token: ${{ secrets.GITHUB_TOKEN }} # ------------------------------------------------------------------- # # For each submodule, you will have to publish the package separately # # and only attempt to publish if the release for that submodule was # # deemed a release (and the release was successful). # # ------------------------------------------------------------------- # - name: Publish | Upload package 1 to GitHub Release Assets uses: python-semantic-release/publish-action@v10.4.1 if: steps.release-submod-1.outputs.released == 'true' with: directory: ${{ env.SUBMODULE_1_DIR }} github_token: ${{ secrets.GITHUB_TOKEN }} tag: ${{ steps.release-submod-1.outputs.tag }} - name: Publish | Upload package 2 to GitHub Release Assets uses: python-semantic-release/publish-action@v10.4.1 if: steps.release-submod-2.outputs.released == 'true' with: directory: ${{ env.SUBMODULE_2_DIR }} github_token: ${{ secrets.GITHUB_TOKEN }} tag: ${{ steps.release-submod-2.outputs.tag }} # ------------------------------------------------------------------- # # Python Semantic Release is not responsible for publishing your # # python artifacts to PyPI. Use the official PyPA publish action # # instead. The following steps are an example but is not guaranteed # # to work as the action is not maintained by the # # python-semantic-release team. 
# # ------------------------------------------------------------------- # - name: Publish | Upload package 1 to PyPI uses: pypa/gh-action-pypi-publish@SHA1_HASH # vX.X.X if: steps.release-submod-1.outputs.released == 'true' with: packages-dir: ${{ format('{}/dist', env.SUBMODULE_1_DIR) }} - name: Publish | Upload package 2 to PyPI uses: pypa/gh-action-pypi-publish@SHA1_HASH # vX.X.X if: steps.release-submod-2.outputs.released == 'true' with: packages-dir: ${{ format('{}/dist', env.SUBMODULE_2_DIR) }} python-semantic-release-10.4.1/docs/configuration/automatic-releases/index.rst000066400000000000000000000006601506116242600275710ustar00rootroot00000000000000.. _automatic: Automated Releases ------------------ The key point with using this package is to automate your releases and stop worrying about version numbers. Different approaches to automatic releases and publishing with the help of this package can be found below. Using a CI is the recommended approach. .. _automated-release-guides: Guides ^^^^^^ .. toctree:: :maxdepth: 2 travis github-actions cronjobs python-semantic-release-10.4.1/docs/configuration/automatic-releases/travis.rst000066400000000000000000000042301506116242600277670ustar00rootroot00000000000000.. _travis_ci: Travis CI ========= This guide expects you to have activated the repository on Travis CI. If this is not the case, please refer to `Travis documentation`_ on how to do that. 1. Add python-semantic-release settings ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ See :doc:`../configuration` for details on how to configure Python Semantic Release. Make sure that at least you have set :ref:`config-version_variables` before continuing. 2. Add environment variables ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ You will need to set up an environment variable in Travis. An easy way to do that is to go to the settings page for your package and add it there. Make sure that the secret toggle is set correctly. 
You need to set the :ref:`GH_TOKEN ` environment variable with a personal access token for Github. It will need either ``repo`` or ``public_repo`` scope depending on whether the repository is private or public. More information on how to set environment variables can be found on `Travis documentation on environment variables`_. 3. Add travis configuration ^^^^^^^^^^^^^^^^^^^^^^^^^^^ The following should be added to your ``.travis.yml`` file. .. code-block:: yaml after_success: - git config --global user.name "semantic-release (via TravisCI)" - git config --global user.email "semantic-release@travis" - pip install python-semantic-release - semantic-release version && semantic-release publish The first line tells Travis that we want to run the listed tasks after a successful build. The two first lines in after_success will configure git so that python-semantic-release will be able to commit on Travis. The third installs the latest version of python-semantic-release. The last will run the publish command, which will publish a new version if the changes indicate that one is due. 4. Push some changes ^^^^^^^^^^^^^^^^^^^^ You are now ready to release automatically on Travis CI on every change to your master branch. Happy coding! .. _Travis documentation: https://docs.travis-ci.com/ .. _Travis documentation on environment variables: https://docs.travis-ci.com/user/environment-variables/#Defining-Variables-in-Repository-Settings python-semantic-release-10.4.1/docs/configuration/configuration-guides/000077500000000000000000000000001506116242600262645ustar00rootroot00000000000000python-semantic-release-10.4.1/docs/configuration/configuration-guides/index.rst000066400000000000000000000006311506116242600301250ustar00rootroot00000000000000.. _config-guides: Configuration Guides ==================== This section provides detailed guides on how to configure PSR for various use cases and integrations. 
It is recommended to complete the :ref:`Getting Started Guide ` first before diving into these more specific configurations. .. toctree:: :maxdepth: 1 Monorepos UV Project Setup monorepos-ex-easy-before-release.png000066400000000000000000003442771506116242600352040ustar00rootroot00000000000000python-semantic-release-10.4.1/docs/configuration/configuration-guides‰PNG  IHDRl&ãl JiCCPICC ProfileH‰•WXSÉž[R!D@JèM‘@J-€ô"ˆJH„cBP±£‹ ®]D°¢« бaWÅîZ *+ëbÁ®¼ tÙW¾w¾oîýï?gþ9çܹe ·ó¥ÒT€\Iž,&ØŸ5.)™Eêd  ®|\Ê‰Š Ðηw7¡´kJ­öÿWÓŠä(ˆÓ„rA.ÄÀ›RYD)äͧæI•x5Ä:2 ÄUJœ¡ÂMJœ¦ÂWú|âb¸?€¬ÎçË2Ðè†<+_uè0[à$Š%ûAì“›;Yñ\ˆm œ“®Ôg§ý “ñ7Í´AM>?c«ré3r€X.ÍáOÿ?Ëñ¿-7G10‡5l꙲eΰnO²'‡)±:Ä$i‘k€âbaŸ¿33!ñ*ÔF çš&Äcä9±¼~>FȃØâtINDx¿Oaº8Hé뇖‰óxqëA\%’ÆöûœMŽ˜÷fºŒËéçŸóe}1(õ¿)²ã9*}L;SÄë×Ç 2ã!¦B/Nˆ€XâyvlX¿OJA&7bÀG¦ˆQæb±L$ öWéc¥é² ˜~ÿ¹òܱ™b^D?¾š—¢ªöDÀï‹æ‚u‹$œø‘|\ø@.BQ@ *wœ,’ÄǪx\Ošç£‹ÛIs¢úýqQN°’7ƒ8Nž;06?.N•>^$Í‹ŠSʼn—gñC£Tñà{A8à‚À ØÒÀdÄ­]õ]ðJÕø@2€8ô3#ûz$ð ÀŸ‰€|pœ_¯äCþëVɉ9ÕѤ÷÷)U²ÁSˆsAÈ׊>%É` à dÄÿˆˆ›æ›²ÿßóìw†™ð~F10#‹>àI $CˆAD[Ü÷Á½ðpxôƒÍgãy|÷'<%´nÚ w&‰ eC¢ Ú¡~P}Ò~¬n5]qܪCeœ‰ÜÎÃÁ}áÌ®åöÇ­¬ kˆöß2øáõûQœ((eÅb3t¤††ë Š²Ö?ÖGkÚ`½¹ƒ=CççþP}!<‡ õÄa°sØIìÖ„ÕvkÀZ°£J<¸âžô­¸ÙbúâɆ:C×Ì÷;«¬¤Ü©Æ©Ó鋪/O4-Où0r'K§ËÄ™y,übˆX<‰ÀqËÙÉÙå÷Gõz{Ý÷]A˜-ß¹ù¿à}¼··÷Èw.ô8ûÜá+áðwΆ ?-jœ?,PÈòU®<à›ƒŸ>}` Ì ÌǸ/àA(ˆq L„ÑgÂu.SÁL0°¬å`Ø ªÀn°Ôƒ&pœ—ÀpÜ…«§¼ÝàøŒ  ¡! D1A,{Äa#>H ŽÄ IH*’H2™” +‘rd RìC#'‘ Hryˆt"¯‘O(†ª£:¨j…ŽDÙ( CãÐ h:-@ KÑ2´Ý…Ö¡'ÑKè ´}ö`Sؘ)怱1.‰%cé˜ ›c¥X%V‹5Âû| kǺ°8gà,Ü®à<àSðÙø¼¯ÂëðÓø5ü!Þ#І{‚'GGÈ L%J Û ‡gà³ÔAxG$™Dk¢;|“ˆYÄÄ%Ä Ä=ÄÄ6âcb‰DÒ'Ù“¼I‘$>)TDZGÚE:NºJê } «‘MÈÎä r2YB.$—’w’‘¯’Ÿ‘?S4)–OJ$EH™NYFÙFi¤\¦tP>Sµ¨ÖToj5‹:ZF­¥ž¡Þ£¾QSS3SóP‹V«ÍU+SÛ«v^í¡ÚGumu;u®zŠºB}©úõêwÔßÐh4+š-™–G[J«¦¢= }Ð`h8jð4„s4*4ê4®j¼¤Sè–t}"½€^J?@¿LïÒ¤hZir5ùš³5+4kÞÒìÑbhÒŠÔÊÕZ¢µSë‚Ösm’¶•v ¶P{öVíSÚÜÁeóÛg:DkžN–N‰ÎnVn]m]ÝÝiººGuÛ™ÓŠÉcæ0—1÷3o2? 
3Æ&¶xXí°«ÃÞë ×óÓéëíÑ»¡÷IŸ¥¨Ÿ­¿B¿^ÿ¾n`gm0Õ`£Áƒ®á:ý† †ß?ü7CÔÐÎ0Æp†áVÃÃ#c£`#©Ñ:£SF]ÆLc?ã,ãÕÆÇŒ;M&>&b“Õ&ÇMþ`é²8¬Vë4«ÛÔÐ4ÄTaºÅ´Õô³™µY¼Y¡Ù³ûæTs¶yºùjófón ‹±3-j,~³¤X²-3-×Zž³|oem•hµÐªÞê¹µž5ϺÀºÆúž ÍÆ×fŠM¥Íu[¢-Û6Ûvƒí;ÔÎÕ.Ó®Âî²=jïf/¶ß`ß6‚0Âc„dDåˆ[ê‡|‡‡‡ŽLÇpÇBÇzÇ—#-F&\1òÜÈoN®N9NÛœîŽÒ:ªpTã¨×ÎvÎç çë£i£ƒFÏÝ0ú•‹½‹Èe£ËmW†ëX×…®Í®_ÝÜÝdnµnîî©îëÝo±uØQì%ìó9M=Ý<ó<÷{þååà•íµÓëùë1¢1ÛÆ<ö6óæ{oñn÷aù¤úlöi÷5õåûVú>ò3÷úm÷{Ʊådqvq^ú;ùËüù¿çzrgqO`ÁÅ­Úñå‚Ì‚2‚j‚ºƒ]ƒgŸ!„„…¬¹Å3â xÕ¼îP÷ÐY¡§ÃÔÃbÃÊÃ…Û…ËÂÇ¢cCÇ®{/Â2BQ "y‘«"ïGYGM‰:MŒŽŠ®ˆ~3*ff̹XFì¤Ø±ïâüã–ÅÝ·‰WÄ7'ÐRªÞ'$®Ll7rܬq—’ ’ÄI ɤä„äíÉ=ãǯß‘âšR”rs‚õ„i.L4˜˜3ñè$ú$þ¤©„ÔÄÔ©_ø‘üJ~O/m}Z·€+X+x!ô®vмE+EÏÒ½ÓW¦?ÏðÎX•Ñ™é›YšÙ%æŠËů²B²6e½ÏŽÌÞ‘Ý›“˜³'—œ›š{X¢-É–œžlÅsÊš)ݲ0Ùv9"Ÿ oÈÓ?ú- ÅOЇù>ùù¦&L=0MkšdZËt»é‹§?+*øe>C0£y¦éÌy3ÎâÌÚ2™6»yŽùœs:æÏ­šG—=ï×B§Â•…oç'Îo\`´`î‚Ç?ÿTS¤Q$+ºµÐká¦Eø"ñ¢ÖÅ£¯[ü­XX|±Ä©¤´äËÁ’‹?ú¹ìçÞ¥éK[—¹-Û¸œ¸\²üæ ßU+µV¬|¼j쪺լÕū߮™´æB©Ké¦µÔµŠµíeáe ë,Ö-_÷¥<³üF…Åžõ†ë¯¿A¸áêF¿µ›Œ6•lú´Y¼ùö–à-u•V•¥[‰[ó·>Ý–°íÜ/ì_ª·l/Ùþu‡dG{ULÕéj÷êê†;—Õ 5ŠšÎ])»®ìØÝPëP»esOÉ^°W±÷}©ûnîÛß|€} ö åÁõ‡‡ŠëºéuÝõ™õí I m‡C77z5:âxdG“iSÅQݣˎQ-8Ö{¼àxÏ é‰®“'7Oj¾{jܩ맣O·ž ;sþlÐÙSç8玟÷>ßtÁóÂá‹ì‹õ—Ü.Õµ¸¶úÕõ×C­n­u—Ý/7\ñ¸ÒØ6¦íØUß«'¯\;{wýÒˆm7ãoÞ¾•r«ý¶ðöó;9w^ý–ÿÛç»sïîß×¼_úÀðAåï¶¿ïiwk?ú0àaË£ØGw ¿x"ò¥cÁSÚÓÒg&ϪŸ;?oê ê¼òÇø?:^H_|î*úSëÏõ/m^üËﯖîqݯd¯z_/y£ÿfÇ[—·Í=Q=Þå¾ûü¾øƒþ‡ªìç>%~zöyêÒ—²¯¶_¿…}»×›ÛÛ+åËø}¿PnmÒx½Z ¸o¤ŽWíû QíiûøOXµ‡ì37já?}tü»¹ÀÞmXA}z Q4â<:zô`ØËõí;•F„{ƒÍ¯i¹iàߘjOúCÜCÏ@©ê†žÿsƒ ‹v›ŠeXIfMM*>F(‡iN’†x l &ASCIIScreenshotê™_” pHYs%%IR$ð×iTXtXML:com.adobe.xmp 550 1132 Screenshot /yLiDOT(Àÿàÿг@IDATxì¼EõÇ' )$!”zï¡#MQ@¤Û¥(@ÿ ‚€€PDAšÒBÞ‹ôBM€@!!õ¿ÎæÜy{ïÝÛÞ»÷½ßÉ'oÛÌìÌoföîùí9gz­°Â óƒ¤G!ð‘|$iïë¯¿Þ£Ú­Æ ! „€B@! „€hz‰°i—®j\=EØ4K•$„€B@! „€B ˆ°iª-^¦›ï UO! „€B@! z<"lzàaÓ;]MB@! „€B@¶B@„M[uWc*+¦18ª! „€B@! „€ÍB@„M³márEØ´pç¨jB@! „€B@!  ˆ°éÃ@„Mìt5Y! „€B@! Ú 6mÕ]©¬›Æà¨R„€B@! „€B@4 6ÍB¶…ËaÓ£ª ! „€B@! „€( ¦6=°ÓÕd! „€B@! „€h+DØ´Uw5¦²"lƒ£JB@! „€B@!Ð,DØ4 Ù.W„M wŽª&„€B@! „€B €€›8 DØôÀNW“…€B@! „€B ­aÓVÝ՘ʊ°i Ž*E! „€B@! „@³aÓ,d[¸\6-Ü9ªšB@! „€B@"lzà0aÓ;]MB@! „€B@¶B@„M[uWc*+¦18ª! „€B@! „€ÍB@„M³márEØ´pç¨jB@! „€B@!  ˆ°éÃ@„Mìt5Y! „€B@! Ú 6mÕ]©¬›Æà¨R„€B@! 
„€í€ÀŠ+®öÙgŸðÔSO…«®º*Ì›7¯KªÝ§OŸ°Ûn»…•W^9\|ñÅaâĉ]Rv¹i—6‹-¶XèÕ«Wx÷ÝwÛ¯Üõ<餓ÂÀÒôÇ{lË´S„MÚ-ÚB@! „€B@tk–]vÙð›ßü&mãO<Ž9æ˜ô¸3w~þóŸ‡UVY%½å!‡&Ožœk§N%l>÷¹Ï…-·Ü2 4(!k¨ÎìÙ³Ão¼ž}öÙpÅWt –íÒK/MÛG¿ùÍo†×_Ý.6]Þª€B@! „€u ðo|£Hñ‹š>}zxë­·Â3Ï<®¿þúDçŒÓô”ãï|ç;a«­¶J›;þü°÷Þ{§Ç¹ëÉãÇgžyfgV¡­îÕi„Í;ì`ϰ¨)' žóÎ;/\wÝuå’µüµx аiù.S…€B@! „€h~ÿû߇#Fäª-:æƒ>~ñ‹_ôH⃉ï~÷»)VS¦L tPz܈µ×^;pÀiQ¯¾új8å”SÒcÛ9÷ÜsÃK,a‡áä“O÷ÜsOz¬b:…°ùüç?öÚk¯â;W8ºï¾ûnEí*"lÚµçTo! „€B@! ZjkË{ï½¾ÿýï÷H 6ßl³ðV¬9묳“O>i°4d»Ë.»„¯~õ«iYX8í·ß~é±í¬±Æ‰!Çâ‹/î¼óΤ.vMÛŽ4°!¸ÌZ–eÍ›o¾fΘ–^f™@ð¡Xþþ÷¿‡üãñé¶8aÓݤJ ! „€B@!ІÄ„ V4sæÌIZÒ·oß’-j†uIÉ›õ  y ›ICšÚtÂ&žHÔú±Ç šQ kL¶ÝvÛðíoÛ“-±m>ûÙϦç0傉3¹í¶Û´iÓ†n¶ÞzëÀÄ$ÎsÏ=gI’-,Þšk®™D¢&ŽÎ„ ’:<òÈ#áý÷ß/JËÁꫯVZi¥ôüã?^xá…0jÔ¨Ä÷oôèÑv–`MøÜeIa3iÒ¤°þúë‡m¶Ù&1{øá‡m€¸êLQ ›ÎD[÷B@! „€B ÑÄzæwÜ~ùË_&·A/Ü|óÍÝm£6êpëþóŸá‚ .èpžÆ ŸúÔ§ÝEr xˆƒsíµ×Vt§Z¦`ˆ°ÞzëôÏ%—\2¼ôÒK]ò¡‡JôÖÌ~xr©¥– Ÿüä'e ³’}W.¯7[¥tÖVX!|ìc ÇO,Xn¹å–¤MàaBüXsC°bçw¶KaÖ¬YáÆoLŽÑÑÑ£©Ï+¯¼Æ×!.ëòË/ÖYg°îºë†7Þ8-]þ/ùKrL^0@6+Xù€Éí·ßÞyç;L·t|⟫­¶Z’žzœø?ÿùO¢‡§ ÝN)LFŽÑB[ˆkDŸר¤©„ ñ‡?ü¡‡©S§†<0s1…—£>:YzŒsýë_CÿþýÓË¿ûÝï?¹E]4=wùå—'é8éB~Oò¤ ;°°gŸ}v: íÚO~ò“d’Ù1¦Z 8ˆ–X>øàƒÄ¬ŽAè%&lŽ8âˆp '„nå(Kϲj矾6}+¦éëB@! „€B@4r„¿-n:Xxá£ÿW¾ò*ÙGgÛtÓM;œçÄܹs÷Ìú`Žú£ý(¬ºêª™y9yÍ5ׄ?þñ™×щSJþýï‡sÎ9§èr–κôÒKŒ L0:øÞ÷¾—Aèà&èäæ¾4dÈð§?ýÉ.%[p8î¸ã„U,·ÞzkÑŠSY:|œ‡ÅwpÉB ÊX€ÈäŒ3ÎHH;f»ýöÛ‡¯ýë™^8\ûí·“˜<ô£—,LÐù·Øb Ÿ,Ù‡$:¶°’3±vZYšJØtȳuA‡–vïÐC-"5p‰2‹™˜°iŒ #l`5üãgºbÅrÙe—…¿ýíoé鸣g¬p ˜^w°¶a@ê˜Ä„ K—ÃL–&/“¸3D„Mg ¬{! „€B@!Ð,ò6ÜÿÄO k­µVQUâEa¾õ­o…í¶Û®(M|€òê©§†»îº«èÒ…^ØA/-JðáÁ£>Ž?þø¢K#XåT’ÿýï ‰bé:謺q­„M–žm÷e é‚•ÒhÂr…ÉY!U’~ø²†•¼õQŒ zzédåx"Éεڶ©„M >…ŸùÌgjÆ &l² 2†àÞ­‰É‹Ö»wï0tèТ¬tä—¿üåô\\ïôB™Ìä~ðƒ¤)bÂÆ.P¬Á‡[Ô×¾ö5KÖÔ­›¦Â«Â…€B@! „€h2Õ6xq@,xùíon¾ùæäTÖ"9¸àà¢äcâ Ïýð‡?L½@pñ!±È\© ‰p±2!/–=|ÌG;ì°^¤!`oÑðÓŸþ4<ðÀIÞ<:k­„Mrƒ2¨ãQG•¸Š}îsŸK\ÈYd‘1x£ O?ýtJT•³°¸Âê%Ö—é‹~ýúu(ŒÑ¡çÍ›—Ü'&IB÷ç˜cŽ)ébå’uÙnS Ì›à&Y¤îLÞ$ÊÒÚ–Ál–+Y„ þw_|qâÛƒylç,1ækt¤ B‚&Ž™Teu4ƒäôÓO÷ßâ ë'.eí³Ï>é`‰ õ¯~õ«€o%÷çá]µ¨ã¾ûîë«Ô´}6MƒV ! „€B@!Ð TCØd¹ýàÉÁ"7^w¤êè|?ûÙÏ’VpPĘ1ñëc Âi`…cÂÊÇ,ÄcrÞyç¥!9¸¿×)10ÀògæÌ™ aC^¯O ×+$KgEo¾è¢‹ÂÝwß襔MèbãTãEùÄŠAÿ¦ ç€Îçoõ…ŽšJØ0=)›qѹ•üõ07³õÛcÂræàƒNI,y¶qYÜÃLÛâŽf ÀšÂPšÀþ19½xv.&ln¸á†$^Ž¥ßi§’ºÛ1[ÚB¤f‹›f#¬ò…€B@! 
„€h&Õ6Ôã’K.I?ès|ÓM7%DÌV[m¾óïp*H¿ð '—Xb‰pî¹ç.HPøë?¶ãâDÐ]¬c lXজ '£/{!<ˆJ@_Oþ°`ÎþûïŸd‰uVê„‹PÖ‚6Õ6,®ƒnë%&S^{íµ€ƒI½„ Ä䉗8Ö+¤ ñv|¨‘çŸ>µpŠ1ÉZZ<=†ÐiUi*awª™dy0*6ž¹‹I–r›ì ±gXAŠ N0«Ê3•¤-GØdY‘çÂBÔkÛÆXZ®Å„ ñt,26שíñƒbâĉþTSöEØ4V*„€B@! „@'!P aƒçŠºV&ö «By!(o,qX HÈ]wÝ53€1 Þ X‡üë_ÿ /¾øbQ‘¬äD`]܇pÍÊ+19ñòË/'.VYù«%l°.ÂÊÈKÜΘت—°Á É“SÜ;+þ-aH6Ùd“´j>€rŒIñtÚi§ˆ0“{ï½7YÁÚŽ[mÛT¿6¿´&^˜Sy‰I}¿<[LØÀ„Áˆe Áˆ1[e•U².w8Wް…XŠ%îlˆ$&lâ VYiDØ$ÐéB@! „€B ,Õ6,9m®DV¨¹Ò°Š’_òÚ®WÚ’«–g9írBÜÔŸÿüçi¼”Ã?<1,°<“&MJ,dì¸Ò6&'¼gJœ·ZÂâÈâÏXYY„Š‘V¤©—°ÙrË-—'»^.{ï½·¦[îI(OtŘÄîiä‰ Fz4aot–³X1ÛéýöÎ<óÌ"7*Vob'$/aä-õK€'|ø‡zÆK9Âæ©§žJ–÷éÙ§®f­Â±"l@D"„€B@! „€h<Õ6,0³Ûn»UK bÑÄÄ ‰°Ž‰½=ÒÜâ¾Ä*CºåA ²‚[Yä·€Åñ}  Ky%9aeUKØPêã•؈B…6!õ6cÇŽ Gy¤¿eQŒX»[Dïç _øBr9&"l É–ÀDG}´;SLj](`Ñâ…ü&H^Â&îlÑøñã“àNø¸1ã`Gå&ʼn:ÿüó~vˆ›- ! „€B@! ƒ@^†•ƒO>ùä¢öÞeÏ=÷L+U*Fš Â–6¬…UÞ¶Že3K˜/~ñ‹a=ö°Ó¡ÔŠÊǬÀÄjÇİÁkÉCNXáÕ6~-+ƒÕ ¼Å ‹òГz ›Q£F%‹üXylTóçbOtåÁD„G³°ÿç?ÿ¹ˆe„<9ûì³ÓÈØ–œ i›’ù•—ò60s6&>Š7ç²¢„—#lÈC©ë®»ŽÝDâ ÆI¢V›o¢›8é¯B@! „€B Ñä!lˆUÂJFÞ£ƒzøU ÖXc„ü°ú¡¯²¢’#v†x¨XÓÌÖ¬YmµÕ’%§-+9™Õ :îo~ó›°ôÒKÛåÄ*‡pYnZxp`h`/Ž[ÖBHrÂÊ©–°a¥fB™˜¥íFf…gô^ô_“˜°tyÃÂÒņĽeE*pWëzôÑG zü9眓Xvîá‡'žxbr˜6†Ü‡ÛØÚÅ.³b…1#c™kÌÇâ@N÷ÜsO†Zž¼„ `ûí··lI0(|ñ1Ä…‰ Jñ½*6 4üÛEpêìݪ¸îG6)üÚB@! „€B@4˜°`!È,A‚ÅFLÔp BƇéˆõLÙ{ì±ÉŠM.Äõqn|ÜBxx"ÓAXÅüú׿N꽑[o½5!qØ=6¨« ÝrË-aÓM7MH¼PL8úé§'‡yÈ ËW-aC>,W¸íßvÛm£+-$ d‹É6ÛlÓ!èñƒ>˜\F矽ß?äCRâ#9ayk!l,oÖýàƒ."½pÝ“&K lÀ )Gذ|:îX¥bÑÆeÇy0a£X8†ÙÄÿ «”<Y»†9™—¼„ yèhozæËÁ†Éç'O9† 5zôè’)k¹r6qí ! „€B@! ‡@µ„ $ÃI'Ì#®I^òÕ¯8)ƒ!dC%ñ+ [ÚvÚ)!?ì8k‹þJ¼Ô«¯¾:½œ‡œ°ÄÕ6„)µÚ2uÁúȬ™ìlqÿZvÙeý©d?/aC⥖Z*±"ʲŽòÇýÀµ<˜ˆ°ñ(FûLXAoÖå“Ðù·Ýv[B¶@¨ÄB@bŸ×“,qZXJ&Òšk®YdUƒ©fiøÛ1¹LDC‡duô¸qã/Ÿ‡úâ"ÅRÞL^bÂÆ³¡–î’K.)ª›- g×›µµ•­˜8! „€B@! „@»!pÖYg%Ê}©z£«Íš5+‰Esýõ×'dG–ŽéóãÖ³ÿþûwŸAš…€¿)裔•%ßøÆ7ÂG?úÑ"£ÒQbÞP_‹{ç‡Pa¹êx…)òb©ƒþJ</ÄmYk­µÒS·ß~{B¥'ÜNL ¡£A¿Å Ë ua,ÜÀ¼õœ÷Ç?†›o¾Ù'O÷©?^2[l±E‘•ÌË/½+,cŽÄ1ni€HŸ#Ž8¢È•Ì®œO;í4;•nó`G@ü ´ç¨U¥WÁUi~gW'@Âu‰ý‰'bÚL˜0¡CP¢FÔ w,bÖP¾÷U,Uvasê©§&ÉЬAÏòa¬8å]¶J•×jçEØ´Z¨>B@! „€B@´ XyŒ3&Ñ!'Mš”¬Z̪Hy„½èºèŸè‹Ï>ûln= ÏŽÁƒ'º+–.Í–,Âf¯½öJn YÔçÕW_MݱòÖ ]’¬Vhå•WÄ­¨!&m5xæ­g+§ë¦•¡nå›V¯{žú‰°Éƒ’Ò! „€B@! º7å›îÝòöh›Œ~a“ŠN ! „€B@! „@·B@„Mkw§›Œþa“ŠN ! „€B@! „@·B@„Mkw§›Œþ!È’L,uÍ5רaÛoåÕö]¨! 
„€B@! êF`ÑEM‚îZpa5³ô¸¤5aÓýЩµaÓ©pëfB@! „€B@! ªF@„MÕµ6í߇jB@! „€B@toDØtïþÍl›LXtR! „€B@! „@Ë Â¦eº¢ó*"¦ó°Ö„€B@! „€B@Ô‚€›ZPkó<"lÚ¼U}! „€B@! „€èöˆ°éö]ܱ"l:b¢3B@! „€B@! Z 6­ÔT6´n#„€B@! „€B FDØÔ\;gaÓν§º ! „€B@! „@O@@„MOèå¨"l"@t(„€B@! „€B ÅaÓbÒÕaÓ(ëB@! „€B@! jG@„MíØµmN6mÛuª¸B@! „€B@ôDØôŽöÍaãÑоB@! „€B@ÖC@„MëõIÓk$¦éëB@! „€B@! êB@„M]ðµgf6íÙoªµB@! „€B@ôDØôœ¾N[*Â&…B;B@! „€B@! Z6-Ù-Í­”›æâ«Ò…€B@! „€B@Ô‹€›zlÃü"lÚ°ÓTe! „€B@! „€èQˆ°éQݽ ±"lz`§«ÉB@! „€B@!ÐVˆ°i«îjLeEØ4G•"„€B@! „€B Yˆ°i²-\®›îUM! „€B@! „@6=pˆ°é®& ! „€B@! „@[! ¦­º«1•mWÂfàª;†þË ýF¬ú Z²F¯Æ¢RêD`~˜;ý­0ë§Ã̉÷„÷ŸWgyÊ.„€B@! „€"lzàh7ÂfÀŠ[†¡›T i†÷ÀÞj¿&Ïþf˜z×9aÆ ··_åUc! „€B@! „@‹ ¦E:¢3«ÑN„Í÷ ƒ×ß7gÞ;üÙï‡Þ‡…^– ½z÷éLØÂ»—|¾Sï×N7ë½ØÈÐgäÚ¡ï [‡>ÃWMªþîC—„i÷]ÐNÍP]…€B@! „€-ƒ€›–éŠÎ«H»6ž¬™óÆ“a‘«wHwa“JÆ©¾+íúot@rE¤M@:%„€B@! „€È€› u·$í@Øà5l‡£èç¾ý|è³Ä˜.ï6ù» ÏGÖ ·ùA’aÊM'É=*?tJ)„€B@! „€HaÓB;6ùÜùIÌšV°¬±!"ÂÆÈ·5KbÚ¼~Ñþù2)•B@! „€B@$ˆ°é¡Õ VƒZb›Ã1kz/¾|Ëô›ê»bàöÇ'1mÞ¾õ×Z=ªzø”C! „€B@Œ€›Øù­NØ Ûñè0`ô¡Zëš^{4,ºô:MëQ6ÕCkV63&ܦŒûYõ(‡B@! „€B ‡" ¦v|«6æ5oúäÐ{ÐR{èÝþ¦þ› Ùðóapá£E„Mõˆ²zÔ O r‹ª;åB@! „€B g# ¦ö«6£¼ªÐ+½Âüys+.Ýmd –54³ V67ß夆ö®›Úà¼/dÚüðʹŸª­åB@! „€B@ô@DØôÀN¯—°!ÆLÿådž~#V-^2Apîô·Â¬7ž3'ÞSw¬’Q^»WŒ°ñypz󚣦‘.RíNØ~øáaµ×S§M §Ÿ~z˜0aBnœëI¸€° Âf×zŠQÞnˆÀ&›l–\rÁ3„æ=öØcáå—_NZ:dȰÅ[¤­ž1cF¸å–[ÒcíJôêU þçϯ”,×õ¾}û†5×\3,±ÄaðàÁ¡OŸ>aΜ9áºë® sçÎÍU†åC`̘1aÕUWM¿öÚkáá‡N»b‡±Ô¿ÿÀs¨a¼lºé¦ižo<çLºÛ¸¢ßè?úþkGéׯ_2·5¿Û±÷Tg!нè¦÷€ÅçÍx§{¡Ù&­©•°a©í¡›”¬ÞT®©¸¿L½ëœš—röäK¹ûp bñÖ4Fâp.&l s¸nç«qjgÂfƒ 6‡zh‚ kN8á„ô¸™;µ6±2Oo»í¶0}úô²Õå%k»í¶ ¼`›L:5Üyçv¨m‹ °ë®»†Aƒ¥µyüñÇã>šóœúèG?š^ã¥ùÒK/MµÓýØqÇÃ"‹,’4ìî»ïo¿ýv²¿ýöÛæ5òÀ„É“''ûñŸå—_>Q‡]tÑä©òþûï‡)S¦„ûï¿?!Yâ|•Ž_|ñ°Ã;¤uóé/»ì²šÊôe´Óþ€Šæå›o¾î»ï¾†6g¿Wú7ÜpCCï‘§0žA«­¶Z`{öìðÞ{ï…^x!<óÌ3™ÕåÙ¡kòÔSO%éíX[! „@=t aÓ»oÿ0pÍ]À6 } V½zõNê<þ¼0»`•1ãÅ»Âû_æÍ.ÿÃWOC•w!µ6C6Þ/ ^ß……äØ{÷¡K´û.È‘²8I5„M–56q9vž»AØXbOöפø¨ ›-·Ü2xàiƒ^ýõpÔQ È®ôd“vj%lbežêA4¡È•“õÖ[/¬¾úêEI>øàƒpå•WÓA×#÷±›®ï“¼5Xj©¥Â†n˜&÷ÝwÃí·ßžW»qÇx0¹úê«r¢f=ö°Óᦛn ±`Å0zôèøtÑ1ŠVZo½õVÑùJ5ÇÏLÖÓÈ ”rúýÚk¯µÃ†l[°¨Ay/'ÈÿùÏ2Ç£åûøÇ?žXeÙñ­·ÞšZœ´ú¸ªeŽï½÷Þ‰õíźíÿøGÃ¬Ü ÃflyÎì´ÓNaàÀe‹å•WÂÿûßiâ÷ŽgŸ}6!ˆ;$Ô ! 
„@ 4°Á}fñ­¾UˆE²à«Y©:Ο7'¼óßßÖíNSª|_ˆ@µ„M-dÝ­Ò&&Z¬¬R[¬lŒ|a‹dÆ%ÇŸ7'Ë'ë^íLØÐž“N:)Ð÷|q>óÌ3ÃC=”Õ̆Ÿk$aCÝQÊÉî»ïž|]÷iDØx4Zg_„MëôEµ5Ye•UŠ›Y³f…+®¸¢ÚbÒôX!`€`Å€¢‡@˜KI) ¾ŠçÊ€ Âê&¯ì¹çž×HŠÿýïáwÞ Xïõ$ÁÒäÓŸþtÚäfX¿t5a[Å¤ÍØaó_|¸þú벦èd9 vV&X;aõÔHéJÂBj·Ýv+r©åCÁ‹/¾˜|Ë.»l‘Š톸cLÄÍxü¶ú¸ªvŽ{Ò,ˆ]ƒEQ«Ë6Ûl“<'|=×ÔË¿VX!µ²467&+¯¼rØh£ì0‰¹ôä“O¦ÇÚB@Ôƒ@Ó›¡›X kŠ_¢¬¢sß_ðë3p˜*Ú¾÷Ø•񾆢s:hÕ6¶Ä¶Ý}ÎÔW”ñ¿H‰;o[ˆ›aÛºàÅ›óÕ.éÜLÂ&¶¦‰­n¬YÛj›å–[.}©ãkšä /£|•ĵÇø@ãb@Vâx<øàƒþRù‚ë/I¼â[ÍW_‹ûàóÂûWãeq ŠH}Mž{î¹Äq‚Å çð»ç…³Zi4aƒ;øqã2«±õÖ[‡e–Y¦ÃµøEÙ'àëä³h¾ rL𳂠ò‘—v0ú‘}â.мÀ[,KG¾•VZ)¹Ö¼ââÅW×W\1ýºÇ1xÛWXË϶ںú¼ñ>ã:› €fð`ÈøÂ ™6V,-[,¨· Jòĉí0ÙÆi(Ÿû˜4ƒ°©µPD~àBM+èF¹¯§‚¼ô-s‰}^ä‰w`x«Â„¹G¹î%#GŽ´KÉÜ¿Ô3.í™MBÆVVp ·ú„9K<ˆ”ޏ_Hë…:Óïü§ Œ{æ÷a˘4±:So?×Èc_”ií¬$Œgê‰à:bqj˜wÝuWr·Ò!X³@Ø"ô ÷Œ]hó5×\STçø¹à ¡¤°Œ??i²âkQÇ7ÞØn&Mš”ÄR«f\¥™ ;µÎoÊà9Bù}¢ÿ±c|1Ock³ZçxL|0OƒµÎ/_©}ïÆEšØ ìpôqñ¼û.y˜“›o¾9»‰߈ßq‰B 4…°°ÒGðí¾_T¿¹3§†÷¾4¼ÿìÍÁ‚ |xàÊÛ…ÅÖ+ø¼öZ”~Êͧ,8n):§ƒÆ `/ÿ•^¨qg[b›ÃÝMç‡ÉW^’¬±„6KíñëÂa/;Þ¾õ×¹ÝÝšAØPsÂʦ_!† .QXÞÄ$NZéh§ÂæÔSO-úª{Ì1Ç„ã?>5¶¢ù2yá…†ÿû¿ÿ kVpò‚’tÆg$Ä?ÏËça‡ÖXc :Ýçœ{yKüñðƒ¤ix‰ÂE !¶ 1nLþö·¿%V*±‹Ê õ‰‰ËWjÛhÂ\p‹â%Ù /S˜Ÿû—x»^а¡Ýž|±ôl!Lxé2…Ì®A’¡$š ¡€±Iü+ –R€˜[õ/ƒ¿ÃÒ±­¥®>¼›ãC Ò6^Öcáåþßÿþw‰Äs¤RP`Æî*&1.Í ljéˆâUø>°:³P&vã‡u#,ãÚ6^;vlé…›Ú£ð¡ø™ \ÆXó((bYÂÜ…ä4¢È§AÑ Yó†tÌñãǧyã:û²l?o\“}öÙ§nVF¥­)LÛn»m¢ÈZzˆå˜Ä¢Þ&y\5è7sŲ|ñ–gÑ%—\’œŽÝ² gè0“¸ï¨ýKO=±Ü ¼Äsr‹g’[––1xÏ=÷$¤+–‡Ys8,ÕȾûî›ÎÚyÇw¤Ù³æ}é·üxƈ×Kü¼ŒqóÖX–ÏÈŽÁл±Y:Æ÷¥MøÍ´ñs6&¤Èá²Ùf›Yö¤<éÉÂN\Ïú´šqEyõÌoHPKR&F’¦Ö9Ž«œÿ€;ímʨe~•j¯?Ï3Ÿñì%+.UÜ6Þ ˜[&ñï!s‚¹!B@4Æ6…X5Kñ¯¡w¿…+Ìzã©0eÜI‰¥EV¥û *|™Úñ¨Â2Ñ žy³ _/üBAkêøõ%« Ë@^ÂfØŽG‡£.«ûî#—…i÷ü)׆ŒýJÁ=j¯4íŒ wÆÀÏÒãr;Í"l¸§‘6vÿ¼d éë!lø‚ÅW¦,Á×¹ù‹%dYâðrñ»ßý®è¥(«Lˆ–ñæe ©†°áK¢éòåó5ôCÉü*êÓùýF6”ÅÖD^b¥Ì_Ë"lò¼”‚?/^¶Ô4eÆ/Ÿ("^Ù÷ÄD¥à•(Ó±b6µÖÕcïÇJ_V=|¬-ˆÕ`¿ ú6[šV l*õOüeÔêo=Ñb×°dÙyç;(¢vmŒ«/'î×ZÆÝ.»ìR´Ú–¿¿í3w kÏÎÇq,m¼e@a¥×9NËq†ç¡w‘UN¹s y$¶°a>W ŒB_b+”q¿SOO<`ÕÑçŸYmáÙ…Åå™Äs—4ùYÂøç¾Õ(ìYåøsžhó«‘&ÏóÑ—ÅØ‚¤¤Ž&囵ÖZ«ÃÇ \OlÙoÈ \šÌZËÊô[~ýuOØ`MÁ²í&ÌI?g8×ò²â71¾ýï>ýH?T3®ê™ß`AÀîRd¬µ‘­Ç –9N=6žýïA#ÆDµóË·­Ü>õõ–‚<'³ž+±+Îñ,7˜…$4i§Õ±¬ÎÚ !к4œ°´Æ'Ãâ[~3mñ¼Ùï‡É—;Ì}·ü‹UŸÁ#ÃR{žz÷]¨Ô¾sû™aú×¥ei§1ä%lbw¨ÉWVѺÆj¸ÀÊæ7vX•[T3  4LÅX) 
Â&¯ÔCØp^`x±‹¿‚Úý¹ÆËküâeÃÅ_œ$ã+éç?ÿyË’'Xw@°«Á^–H€¥Œ}­¯†°±ÂK7—_~y¸êª«,YÅm£ÜP°¸~9Q^°=Î1a›½S& ÷àeÝ¿àÒ'X˜{BüòIÞXìe•—s^4}¿Äi³Ž=aSO]³Ê¶s±ÒgçËm=QÖ.„MV{¬pÀBÃ÷×°Ncüx¥Žr°.ÃZ!_]ãùš\,ó§Ñ„ Ïs3±Û2þQ<°4ðmcŒóÕ›kŠ­Ÿ'<ƒ ‡ÉG™Þ"‚ó¸#1±Hâš¿NyV.Ê„J91‚4¾Ž9Á<ô÷`.šøà­vŽ-yP’)÷ˆ\o-ˆÕ äS¥`Á’Õî¸Íä§?yÞYûKÍ]®Çeúû[þ8 xó\Ï+>®'KÈŸçù߇ûƒ#s)EØ`͆Պ~ÿî¼óÎôT¼*“](‡'+,}©-ä:óÞÿF”²Rò–HŒ;iW;®ê™ßXíB.yaŽƒ·Í Ò‰ëµÌq\$y–š0¿xwA1&ª™_V‡Fm±¾ô´”kÖCvÞM°5éÉq® m…€h 'l†}ü¸0`ù…¦ÇÓ n'(ÉydpÁUw“ï Sn8ѵmy ›Q¢”÷Jïêû¦'Ëì/ó ®P&[(ÎW.åÂk¾nq`á…©Š-lÊ¥óyl¿Âÿø#<2yáæ«/Ü^øÚ{î¹ç&§=ôТ˜ ¸cœp ɵýöÛ/uÇáåö”SNI}ïYåÀ›öò"}òÉ''ùª!lx±=ñÄ7¾ýèG?*2¥ÇôüôÓO÷Õ/»ß(Âsòu×]7½×u×]—ºi 4ûÀ£(Õ>'lPVøZæASF)œë±Å‚¡Ïzù3qLËQöyyãËllOù˜Rð”þ£_x9ŽÅ›zë—ë³”>,(0ó¦ (屛У­úÑN„M©þaÎðEÝ’ìMqŒ¿4£ Û*e±õe0Îp-à Ë,w—F6‚Äxñâ¿ü¢”aä•K#Ý £ürÙ`ä] !³ Lè{,Lª Hjù²¶ÞÅÅ+¿ŒA‹ãçpVv.‹à´k(ªA5òÕÎWÚú˜-¤…Ü ,/Y %ã’¥€Á—~À->ñV\3R˜9» ãÇ‹‰Ï]òóÂ}xfãbéŸo”ƒ 4ËC€··*àº)ëìW,DŒ<ÇB·!“RÏGžyÄô ~´Ïò[>HÈ$‹°á¹>&ü®‚› ×üo 癳ô9iqMÄÚÊ[½Æ?ß9.%”Ï3#î;È4æ‡ÈBúÁ$Ëê¬Ò¸ªg~s_>bøçc‘þB‹Œ#?N¼[iª™ãÄß#½‰Î5bLT3¿¬ØBBÜúq—e¡žÞE˜¹ù%B@4†6ùÂ_BŸ }¶']ö0çí?•*¼Èˇ‘{ý.M6wÆÛáõ¿~)=ÖNcè©„·®ÉÎ$lN;í´4 '?üçw^úÀËöA”*‰£ ¦ø$&¼hqÄvXrËWàsÎ9'½Žâcqkª!lbB†—J^üL %ˆ‘“WEØ\y啉E€½`úØ þe<)敽Ø¥*‰Å_QMìë(ÇY/Ÿ^Ù°V½`õ °µX íDØdõ/ØX øqOZ›cœˆ;…»E€ „VZ&qÿqÞ+2qßWë‚Yã+¯´3&l¨î-¸–äQ4ªQæ(»œøXFF(‘Þ9±K^©òJ6Ìa-žo>€l©rüùJŠ5ic…’û1‮Œ;¿ÊyãU«“±õ–ïÛxLÆV&ñ¸ðÏ/î—U‡F¹pÄãûAÖÐFÆÖ]ÔÃ2R ‰ Æ,$‹·tâÌ;HF{‰WóÜçÞö;BÚ<„ }aäãQ‘×WŽMâ`ØqÌÒUWq?ú1`÷‰ñ¶ùm×Ëmã1£jæøN;íT„ $™yq©Sµc"®k¹ùhˆ1àÃï1•k(>ÿÈ»ˆÿ¬TŽ® ! êE á„Mlá-òT¶ÞüyîÑÓÓä%lºƒKA… 0l}_­;TRÆ% ­¿¬œRÛ8èð׿þõ¢À‘+öòÉ—Úƒ>8- âԘĄ /¹¼p÷†¯¤ ¼|ú— òÖJØ`¶M¬ “˜@â¥ÎJ–®Ô¶Q„ _µyµUu<Ñ‚¥€¹¯ØK­ö„M–ys–ÇñÔážñËg¬ yb¿÷ø%•´õ„猰©·®”UJb¥K.¬C¼d)õV·v!lJõOs€vçfÁ¯ˆâ]<†ñh$aãÝížqpáðŠ1dŒÅۈǀ•AÚÁ"¥œT£ÌY¹¥¶>&ŠW°=‘cA†K•a籬3…-~&’†ñÀ87kËWnëŸ%¤ËcaSjcÏrÎ{²2&l|>ö)Ó[ƒÙu,0}<8~[³\½br¡aÃ8Â5‰¾óþÖ5XÙ˜d¡­4®êßvoÞ x_€øâ™ÀûG„:nO5sÜ»ÉÑ?¼G™ÿfÖ2&b¦ÔüŠ]“ ¿…Tçw¬œd‘5<q§„“! :†6Ëð¯‚âØ;mÃ+çíVøu——Ý)äUÈo2¿ïUòKŠ@^¦]ƒCÆ FÖdWM°aË_K„Œ7¡¯•°¡ï~øÃYmXýâm­„ «VaE`ÂKÞOúS;L®"lxÁô. 
XN ŒÄçxñô/Þ°Á¼Ù/cž6¬ÂŽY_Ä/ŸÞMÈÁK~lžù>\,¾®\3R¤ÞºÆ÷ñDZÒgD„OþW¦96׌v!lJõO–õ í«$¸àžçÉòÄÊŽ•§k$aÇ ±{–Ûz¢“ež!A³ˆ +å —ALü½T£Ìù|ìéÌ}Q&=¡ájâdêÁ×m„•ÕP²+ .%İ¡¯ '¤ Öd<òH’/3Vºª%lbåÕ,ȸG¼ 笞ì›0–í<Ï ³°á:±xP” G¼‚bym‹˜'ró*s–ßoã¾õ×òìã’—l¡<”8Ü}û²¬ÝJÝÛ?KH“‡°ñ.›¾Ü¬q‡›¡=g,m¬¼zk‘xîÚœ´¼­DØx¢ÉêÇ6×þ9_óùl?Ëõ'vCò˜Y>¶q`âR„ ã%Ë ŠXA1éËÇ‚…>2)EPTWõÎo>dø÷«O©m­„ n³C&¼{ø ãñ3¹–16¥æW½„M‰ÍïÏ>O&[[µB@t 'l†m÷ý0`¥¦uïñkÂÔ;ÎJËí Ýâ°Øš»¤Ifúh;¬Ù%ª•-lxãÕ è "þ%ο {Â&Žq‘õõ91c'~ùôŠFœ<&$²ƒ8N eaSo]ãúøãXéË"ˆ¿ƒ¢ëÅ‚“f)ÝñWä1cÆ$Š™åo%Â&&ÊGŸÇê[nË’ÌXK˜”G±…R9ÂÆS+7¶ð„M¬xf¹µY9•¶X¹`“~ÅUůE^ðñn(õ6±b]©nþºCÔ™@ŸFÄp 뛘ü ¿wÙà8VN9WJü³„4õ6ñ¸£¼qãÆu°‚|÷.Cþ9ÏÝV&lh_ü\à\8²Õ¯Ò¸Š1Êso+¬ø``sƒó)Ï<óL€š¹±xNäã±ûq°XôÀ$þÍä|µc"/aC»ùÝñí¶zØË«¬÷߬ÃKXÖÈ ÊÐÓV®@ á„Í€1[ö#‹ÚòæµG‡^}¤è\|°è2ë…á;/t»àú”ñ'‡Ïß'ÕqTCØ XqË0l‡£¢;Îï>ry˜ñÜ­)qQ3`¥mÂàuùªÔ«(ý”›N 3^¸½è\¹ƒF64¬<†aS®u5aÃð_ýêWi5Qðú“Ÿü$`Ñ`Ò]-lbókk/[s[bß¿ {E8^Ýå.˼ž—7pE Š0ÁFâ—O¯H% ÜŸ8h$—ž~úéÄ¥SuÿeÒ²aSo]­¼¬m¬ôA<ý‹i¬0xe™•RXíÆË½÷Þ›®µsñ^ŸŸ41¡Õ™6¼Ð£Ðx%0V|©#_¨ýª0(5´#þªLÚ8NE–âé ›˜£ OPGúÉ»óx†U¨C&YcRÃK(+XË0ü 2Nü7É"ä|œ“X™cüø¸VNÖ<Ûm0å K/„ya}ÃüÃ"¡Ì”ãZ­ØP\xàЍ(þYBbßG–9¯B™5îb‹ãb]á]ÅP0!£xîÆã¶•,l¨¯¯;Çô=Á­ß9ç—ç c ²¢ÅžÉäá<Ê´Y>0^ H½0v°ŽÀ•…ø-Üׂ'mLØPdŸ¤‹ƒCs.Kb2·T`âJ㪞ù)š&YV-ÞZ•t•›Rs<¶"´ß/»wü›ÉùjÇDÞùe÷¬vË3¼ü˜dì𛯶l¥B@Ô‹@à *´ÔÞg…¾‹/—ÖmîÌ©áíÿü2|ðröËÑ¢Ën–Øö{¡Oÿ!ižÙï¼&_ºÐz ½ º¨†°áfC6Þ/ ^ßšîûîC—„i÷]PUÞj ›7¯9*‰W)cqkدǚ&«Â]MØP§ßÿþ÷—yGV<`å ÿrÏõîJØÐ¶x9fÎÅʪö„ ic…Ee²‚—u^Ü|œÿ²¿|Æ÷¥|‚NBZø—@®ñâ‹Äç““…?þ…·žºZyYÛ¸\ÒÐNVÏ MVñŠ(±Iì.Åx$? 5_9 Ü襕êJh¸:!À²Ò¸¦ø~2R0‹°"?Ä ¤Yªøö³ï ›˜ã:c2b… ª±ré ›,óæ= úUlâ2ãcÿ,áZ=„ ùãqÇ9ê‹»(êô­È³ ˆçn«6´ƒq‡ãå¸\Ä„yÈS°ó‚»Á˜¿H¶œçº€{±9Á9Òàîèç ç÷¬¢VJ œL±WDéÏŠ}Ri\Õ3¿³W'^XÅø<´+&lòÎqÿ;?ã)7þÍäR͘h&aC;±ˆòÏyê‡E¤M–ðœäY.B@tM!lú/¿iXòãÇv¨ÿûÏŒ/XZÜV°Êx!¹ÖwÉÀ·WÙ¾CÚ·nøq˜9±xÅ’‰t¢&ª%l¸I-¤M-d ÷ª†°±¥ºýÝFàTS÷­$­@Øì·ß~–-µzóbá¿>vg«”/ñWLÿ26Y+@PV©—zoz¿|–#l(3«®œ7$ñœ÷„M=uµ{dmc¥/+M|Î+\‹¿¬Æéããøe¾+-l¨V&X ùyÃùR„š)ޤA¶ÜVœ-ÿ×6(q`êò¹CB±R‰I)÷"°Ž•bò`m€… ×âe͹ŽÌ</Ã:&X+`’%¥‚½Æic—;÷Ò»ï¾{šÜꜞp;YJ¼»œ¹›µjRfÂOúg §ê%lJ»Ruˆc¦Äs×qË“q1ÑJ:Oˆqì­§8®UâçcžrâJ6”•åBêñpa\Æã×׃1n« rÞ6«­¶ZBÒúôyö}x’¹„åY–äWµÎoî牻?϶˜˜°k1a“gŽÇó"RÛK#ÆD3 ›x>ùº×–áw@IDAT—Ú¯ô»_*ŸÎ ! 
jA )„ ²áç î(_¨¥N…%˜ÿ¦=pQMy•©2µ6”Š{ÔÐÍ }- z›u7bÖL½ëœªÜ |9Õ-¶\··¦ÁÊÒ¦Öà¾.~¿Âæ¿øEjòOtPºÂ ÇåV‰BYÁ’Æßþ#\èfÈKKŒz%ŸÎ3Î8#üñ–-ù{ÔQ ÜÙâø6| <ùä““´p@QðÕ .¸ yy·‚/'t’&þ>é…;\ÖÛ¾vñÂéÝYâ¯ÿTÅ¿ Ç„ ׳L 9K쮿|æyqã¥E ~Q&ïm·Ý–ô§¿fŠ«Õ¥ÖºZþ¬mü’J;}ÌŒ8O–•c _wŸ—wïVÐj„ uå+6[Yä†o cˆ~`3©¤2.écOØp®”Bfé!?ü×þ˜4"]¬àZÞxÇ–°%°KõŸågÞD”àæ^âÀËv-/aƒÓldóŠ-+;;69†•b ñ…œô¥,(¬^l!žˆ'bÏ­Ô¾–¦^†2òŽ»øùCÞxîz²€ë­DØ`åÄÜŠIQê‰0ŸpIõýçxÌgãõ`YÆodV&¬¯ïŒ1OØÄÏwKSik÷Ç*w*“,²Ì®åW¤ñ°üñ6žß•ò¹ï—˜°¡üJs<~~a…„k¬—ÓZÆ„¨ö…€èi4°ÈÁ|& ÙèKUa:íþ¿„w¼¸ª=\^²¾Æò2ËòͱٳW2)/ï×z6ܬ 0(›@”¶Äwü%”ºRg/µÔÕç÷³”>”H”=O^ HóβßY‚âC hO ’ŽqI¼\rLbÂëOFx+©Ø%ÅŽ8•¤–þÁ …¥~=¹d÷±öã®Ã~,`¶õÖ[wNʸ@‰Ã"À»–Å„ åA6пžpà^dŒMâå˜ÄÊ«‡øÀšË÷]Ci¼ë®»Ë;g[bcár7ôÖ8ŠìÇ‚• ãÚ+~±ÅDœÏŽ}¬ža×\sMrÉ+𥿮•a[úòr,ni?(ëyãÖX¹lý³„cÆ¡'î8Ç8÷.&YÊ/é¼Pgâ\e;ðf1†b‰çîí·ßžÆØ"-Dñ‘L²HƒØÂËžG–¯–m¬œCXb!E;ý2í”M½pA„DóâûŸóžPá˜9¯ú^ô Ÿ .©ô A“¸dA¼úqGZ¿Züü°²*m­âåØËƒ<ãÊî[ëüfð±À?ÀŠXQ`ÃÖ&¸³òœðRiŽCŒñbB2n½4bLÔ2¿|ÊíÇó©\Z»–÷wßÒk+„€¨¦6TŒø46ýF¬R¶ž³ß|&L½ï/%ãܔͬ‹U!`J®ùÃW•¹:ðªÂ]z…ùóæ†^½û”½£YÓà…ô+Ä®Äñ.Re ¨âb«6UT½K“. læVòÊvŸèÒÊ}xs” ^ìQ"PüßZë™á­VPˆˆUá…(3&•, U×ø%Õ¾SîÉK)V2^ ²zÆ[ˆž-à†UX#ð‹ïÑìc””<”~Ü&PäP8ó„…)¤*Ö~¾¶W"l¬|ˆ#î… $dIbiKm©}ùCßQ—,¢)+?õDù‚ŒÀ²å¶´c¬ÔRïjîS)-í`,Ò'àÈo]Wש\wàθ?Èú-ϼ+WnW\ËRα A h'„$ s¬‘Bù=^ èâûÄ:åH_Vžý8†DH£¤ÖùÍó™¼<ø«Uâ9¯¾–µúSWމZÛ©|B@VB é„5¶ÿ²¬26 ýX ªßˆäôÐ3ÃroM ÿ|þ\5T'l[°±¥ÄçMŸzZ¸êQ)h̢Ʈ7#à0e‹°1„óo{/62 Úù´PíÒîùïÐÚ)³â!`­B`Z”‹Ñ£G'y½eEÖ×ðf´²aÓŒûõÔ2«!lz*Fjw÷A œrÞ­ŒŸkXha©Æ»î¥Þª:ű¹ê©g¼òR–û\=å·R^ˆFÚk1vÅWØaºíê1‘VD;B@6E ÓÃeú¨÷׫ÏYÜN…g\ŸWlB™^ÔNÃhuÂfØŽG‡£·sÞx2,2bõÜíÇÚ†ñÕ,aS=²}WÚ!ôßè€0cÂaʸŸU_@›çàK2_ ½9z¹&ñEåë†fK¬Øx ›fß»'•/¦'õ¶ÚÚÕÊ9.@¸„å•,wÚ¼yãt<çq53Áʦ» ¤kŠ6bYIðêXºzLÄõѱB Ýh Âæñ¹/‡gvdåÛ Ìv©o«6ÄÈYb›Ãüw&†Þ‹/ß2°Š°©¾+n|è3|Õðö­¿®;¶Qõwo¸ã$+Nޝ!_'‰õ€Ézgˆ›Î@9$HóºDuNt!Ð<ºZ9‡$'nÔ˜1c2cù–gÅ“ò׫ÝÇz‡ø8&qì;ß]¶¸òâÒkB\¬™béê1×GÇB@vC K›Õç.Žš¾~VŸ~Fѱš‡@«6´ÜÜ¢ªµ²ijr‰ª[³®é©îP1^óe‰n–2&¶Ö4˜ëCÐ+…•»òƉˮ剺˜k·Ic X¨=s)™~&ø­DtGxÆÈÝ„gJÈÖ®5s IJ`â9P·Sb3árJ˜ƒ²n¸Í’ Sn:©æ%ÞóßQ)…€B@! „€Ý N'l[ûÓaö[χ=>v/ü7¬´‘4v l@aÈÆû…Áëï›Ò –6"lòM³¬!õ»]¦ÝwA¾ŒJ%„€B@! „€)NØ,µÇéaê]ç„,·¨Kgß.uwZ9í4v!lh½'mˆi3oöû¡÷À² –¬¸äw£ÑaSQVƒê3ríÐw…­“˜5¤YS/]B@! „€B@TB Ó ›Q^6XÙÄ«EQY¹FUê²ú¯·aCkqºÙA¡Ï áõ7^%4bÖ@ÊÎxáö¦ßK7B@! 
„€B@tWº„°¬A¡CâX6œ“¥ (4OÚ°1$X=ªÿòcC¿«È›% §{Ù%m»ùaîô·Â¬7ž3'ÞÓcWƒêÒ.ÐÍ…€B@! „€èvt aŠ67+MšŽ°g`EÚt€¤a'Ú•°i*H! „€B@! „@‹#Ðe„YÙ|ðÚ£aï~›†½ûŽÍ„JÄM&,uaS|Ê,„€B@! „€B étaCË m&_qhÒÈr¤ DÜ$05䛆À¨B„€B@! „€B@4 .%lhÕ+çîš6®iCB–ÿ~|Þ+ZM*E­ú6Õc¦B@! „€B@! :–"l¬áyˆÒyóøÜ—“}˯myDØ”ÇGW…€B@! „€B@t5-IØ(y‰K/·)C¢üV„My|tU! „€B@! „@W#ÐÒ„q³fïQaÍ>£ìTÙ­ˆ›²ð6åñÑU! „€B@! „€]@[6$ȤԪR>­ˆÆÂ}6 ±ÐžB@! „€B@VD íbòF¤GlÁ~½„ÍrË-FŽ_|ñпÿЫW¯Ž7Ñ! : 0þü0sæÌðÎ;ï„I“&…—^z©CB@! „€B@€@[6¾ ËÅ»!8ñ‰3/÷É{ô~­„ DÍj«­ УñSã…@£˜1cFxê©§DÜ4 P•#„€B@! º݆°±>)E܈´1„BM1lÖ^{í°âŠ+&… xuvýê °Üô!aØÜ¡Oè½°ðNØ;¸ÏŸ;á.º…hX¦ :4Œ=:½Á /¼{ì±ôX;B@! „€B@nGØX—×ÏAŠåµj-l£ÒÆcÔÓ­lòZؘuÍ𩋆î_ÚCØ¥û"lº~ݼI`ÉFlYÙ4 `+„€B@! Ú nOØÐ²²)•y ›7Þ8,½ôÒ¡Z묘 Êš%"lš…¬ÊíJÌÊæµ×^ ÷Ýw_WVE÷B@! „€B è„¬lŠGZ^ÂÆÜ¡>~Ëð0bîbÅ…dÅAK­Ø•‘µªS"lª‚K‰ÛVZo½õ‚Ü¢Ú¤ÃTM! „€B@!Ðdza†±•MOv‹ÊKØìºë®¡W¯^á³ã—¯¸t·‘5c{÷Ý4<>ï•p鬻K°o¤ˆ°i$š*«•øèG?æÏŸ®¾úêVª–ê"„€B@! „@ Ðc›ØÊ¦'ÎKØ|êSŸJ†äÆ®84°ùû o§i ã˜,KÔ¸#¦Fà ÙúõëæÎ›ü¯½ål6ÈUW]Õð[l³Í6auÖ |ðA8÷Üs^~O/ð³ŸýlXrÉ%Ä Â5×\S}ûö |p’çöÛoOâUU@ ‰¹÷%f÷4Ù`ƒ ¿øÅðüóχ3Ï<ÓN7d[F ©€ ii>ýéO‡í·Û.ÜpãUÏ¡–n˜*'„€B z,afŸ~FеoÖf6X,!ÞšÆHœ,Â2çÒÙXà,›äÛ»ïØd›çOg6¿ÿýï‚ã–[n þóŸóT­!iFŒ6ÝtÓ°æšk†éÓ§' 1MP´+É9çœúôé“(üW\qE’¼wïÞáûßÿ~Xi¥•“'tRxúé§+ץ׳ÚÒ¥ʸy=}•Q\h&aóóŸÿ<¬²Ê*‰ÏÞ{ïu{«¿ýíoaÑE o¾ùføÚ×¾VUIÇgŸ}v’çÖ[o ¿ùÍoªÊ_KâË.»,ÉöÄO„cŽ9&-â¯ýkèß¿rÌ3ðÆ‚òÜ(©£FÕAå´&K-µT8묳’ÊaeøÕ¯~5L›6­5+«Z ! „€èDz a¦XLž?5L›?3øün ÏΛԉp·Æ­šAØdYÓaã­n@Àγåyc (®•’Î"l°B€ü¸ë®»ReªTqž{üñaÙeX¾L^`!0î¼óNºheüè£NÎýâ¿(bȱÇÆŒ“ìó‡²~ö³Ÿ…gŸ}6=×;X÷¬¾úêÉ­ž|òÉ0kÖ¬’·-Õ–’:ùB½}Uªº"lJ!Óúçë!#Z•°ùÝï~nºé¦†_F «„ *‰$ ÁÏß~ûíðë_ÿºdºZ.P.å#W^yexðÁ‹ŠaS‡„€B@¤ô(ÂfÕ>K‡EBï´ñ¯Î{;¼3ÿýä˜X-(²=AšAØ€›Å2†sY‡±lòçÀɲġŒXº#a™‰‚K2gΜäKýàÁƒÃ AƒR.¼ðÂ’ Ô—¾ô¥°ýöÛ'ãØ^ŒÉøÇ?þ1‰EÄ×J¾˜‘“ÚI;¸Zzè¡ÉÝN?ýô/ì¾¥ÚâÓtÕ~#úªTÝEØ”B¦õÏ×CF´aÃòòæaÓH©£FÖCee#ÀoÅСC“ߟÏ|æ3Ù‰j<˳͞ÿÿüç?Ã\С$\¢H‡U×u×]×áºN! „€è‰ôÂfD¯!aÉÞ‹¶KôZ ¿=zx}Þ; ióæüwÃü¿ž Í"lÀλ™yRÆp5KœøZ–K•剷ݑ°Ùe—]‚¹©<úè£áW¿úUÚì-¶Ø"xà)érØa‡¥×ü„˰¿ñÆáˆ#ŽH.-³Ì2á§?ýi²ÏWM^”»Jª!l²ÚÒUõŽïÛˆ¾ŠË´c6†Dûmë!#Z‰°i&òõ`ÔÌz©ìt5a£~B@! 
:"Ðí ›½ú…ez-6]d¥°Ù"«„¥ „—Éó§…»æ<îžó\xuþÛaÆüÒn>_;ï·a[ÓÄV7åðma±ÁòÉï¼óNÉ[ÕëEìîóꫯV´Þ:î¸ãŠ+®fΜ9äu‚€Yc5’óßúÖ·’Ø6q"«ïÿûßpÞyç%—½kÑ/ùËðØcÅÙŠŽ—Xb‰Ä Œ8yÅÚ9iÒ¤äËl©|Õ6YmÉ*wÀ€eŒ ¦\,¿üòIŒ„rc «¼FôUV¹œka3pàÀ°òÊ+‡EY$±¦bœ#yc؇eôèÑ ®¯¿þz˜8qb˜={vRF¥?Œ\úøÿÜsÏÆD^\¤OüñN‰]Á2ê¸è½ôÒKá•W^É[Í0lذßwß}7<õÔSaÞ¼yIÞ¼dØÐNp}ùå—“¼"l† ’ëÞ0aÞáƒ0§lÚv¿Ø•„ Ø™ëV6kö•Æò&&qJá\ aÃJ(›m¶YBpüð‡? (³¸À!(ù¬ò§?ý©ÃíŒ4ˆcØ— ¼D#—\rIø÷¿ÿægu­­¶Ú*ÖîÃKögœ¾ýío'$ñc°"19í´ÓîOX×dÝ}÷ÝæâˆOcùyyýÑ~”R/ù_ÿúדc«ƒ™þæ7¿™¼d“êµÖZk¥õ%Àñ‹/¾PêJ'Þc=ÂÇ?þñ4@)e‘6øÕePzŒÄuÿƒ:(¹fâ¶<òÈ#É¥ó 1…zÑ¿ÿýï‰Ò±ï¾ûÝ’‰ •¬nãe“M6II0ú¶C¤Q—É“'‡#<Ò'¯¸_o_•»A=„ 1 Ž:ꨄ ð÷ §žzj‚3Ê"ãÀ¬¹|:”—ÿû¿ÿKbXøó¤g ¨º”òKLHÅ6Ú(ícÊ !m~ò“Ÿd’7‹-¶X2n‰oA&ä#^q4ü¸å:é¶ÝvÛpï½÷V­h¡´Òßë®»nÑý¸–iÌ!ðÊÚÆØAqöbq¸¢À– :ÌxÝsÏ=ÓÀß”A;o¸á†ð¯ý+ ¸ZKÐažqXáARx™ñþûáÆqã2ƒ¦—"lˆ£Åjb¥Æ‰/?Þ¯£zÆ ¿kŒAÆ’ ”r, c²âÒK/MÆ+}H»³ÄpzàRkEÒAdz‚ü;ßùN`<›@󌇸ÄUõ€(z^QÊþÇ?þaYж¥° ßµ×^›éRd¤,ãêË_þrBÒRŽ=w¹ãû„N±&_|qBðÚ±ßr?V÷òÂ1¿IÌ'Æ AòyößsÏ=v:‰[ƒUb)¡.ºè¢ä2nR¸O™2¥Ãï °6GÈV/¸ó¡‚ߨXpíbî!í;ï¼sØxã‹Ú á„›®¯w\ŽŽ…€B@tÝš°¬Ù«ßذIŸ1¹ð½wîóá²Y÷$¤M® mš¨« `3ÒÆ ÌKÖ¾ÂæðÃë­·^bù™àãÁؽ٢$ŤM)ÂÆ,,È÷Ÿÿü§HB.õrÊK¥Õ/ëÎ+ßøÆ7Ä +¼”~âŸH”,^h·ÜrËäŶTù”g–æ~”•v^áÅÿØÂKnüU×c•ôdÊ1ÊL)ùÊW¾Rt)n‹)ìÖ?`—˜™BÀrÈ[Í@ž 2uêÔ"…»Â&)¨ÌŸJ}U&kÍ6(NøÃ_ž³Ž/ú:YŠ8 ñJ J XÅcôBF`fåçþ¬vfBr®Ü=³7[ňv@ÀzÅÓÊÎÚ¢LƒQ¹ûÑ6Æ-J¶,bpSôН¿ÎxãÙB?d6<|\)Ÿ—}¬V[mµätµ„ ó‹ùXªnV>d¥YqΈˆx•(È5³æÛk¯½’:åùS/FõŒAÆ=d­­n•U_¬( ëžÝƒkY½!°, „<¿M¥Ä·ƒö`5É|²g¶åÃB“¹YnœC¸œ|òÉ–%Ù~á _HˆR^xá…²Ï)H;È;‰B@VB Û6C{ [-²jøR¿­«Âû/³n ÿót˜úa0âª2·IâV l,Ð0U³Bék!lȇ Ü]}õÕ‰ÛÅ6Ûl>ùÉO¦ /™|¥3É"lüŠK|·/ƒä;vljÉÁK./¬(_(¦X¢˜DÚj”‘Ó Êbï‚e„$T,?þñ“û àb%ÁK-/ýXT Ô"g€Ç &ñ¼P›¥„Z\Çê窫®J^ôÁf§vJÊd‚p1‚‡vXŒ”x¾4ó›¥Èùz‰;rÊ)§$8[] ÎLiÅôò½÷ÞKÒÛŸ¸-vÞŽi«×Œ?>qÝ¡®X!¼ìƒ)§ž°á: 5y±¡mոÿœäé«rùkµ°±/ë”ÆRôXŽ­½öÚÉ×}\nLÀÎ[Ø@` ìšå íÍ7ßœ(.Œi¬YŒÂý &ß°êá‹5žŒyæýö¸:!ñ—zŸÂâ·¿ýmòe²q¿ýöK­ Y®¿þú¤ \˜,&'J/MGˆýĸF#(ɸ_QwpÇ"a\P7HVhc‹€ã¢ƒº’ד@1aC!ALÑ$ýÃ\Þn»í"Ö®Q~5„ $ ´)ÛÔ™yIÙn¸ajI€%$ƒ‰ lêŨž1i¹aVàÏøÅ%Œñ·ùæ›§c "Ξ]à`ÔCØžô)Ár±ÞÛÿý;|€L`cшŇœŒ{.RîlÌG›³<Ÿø­ÁuñÆ|´þÆ*„ûšÄdŠÿ½ãY@0wðBî¸ãŽ€›,B]èƒc „È€‚›«X<÷±¶D˜+67øã÷âþûïO¬ú˜[¶ ÏÏþóI~‡°ò!/V™÷¾æšk’}~ í7 aCý b¬Ýô×m·Ý–þV . 
åòûbâ Îñ|¤Þwß}w9rdB¦ÖY±•£­B@®B Û6Ëö¾Üo›°NŸåªÂöѹ/… fÝ^š7¥ª|픸oa ®„e­„ _BQ6M‘ç>¸ßØ×i”œï~÷»éíc†¯{¸S!¼ð]~ùåiZvŒh@!æå%Ô Š‚‘6y 9”qûRšåÅ=P(yaå%åŤ\ ¾ £$#&LHÌä-Û}ìcéK7&óæö…´õÖ['/õÜ 3xHÌíOO²äaSª-¾¬óÏ?¿HQá~,iN{Lãqa@î [cqÝ,¢/(µˆ¹”ÄD× y°f@AÃ*Ç»Øy \”òÆÉaN2P@™‹¦$r?”UÊ¥½²É5o!€B‡…›Ç¥ñfÊdLØpÍH+ܼPª½@¬©Êùj¯ ƒnk^PÒm>Ò^¬*LŒ¬haS/FõŒA¯CÄQÿŒgüò쀨 ßp#bÔKØ`¡Â3Òk.žQFÄAðù1L:®CfP§}öÙ'SÞ…'‹ …D57θOýx`^0®<+­´RbÕÆý³È‰J1lû<ÿ©3d¸y9ûì³S¢‰´>N϶J«D•"l|»xöø üÜŸßU~Á›ºAˆY»ýøàYCâç#Ï"žÌömÓ¾B@®B Û6«–ðþþ¢»†Åzõ¯ Û÷æÏ §|puxjîkUåk§Ä]MØxëpë,¦TÐ]HÜ pÿ9 ð2oâ  É5^ò±Ò‰…—U¾žC…—ÇX T #¼„'ybÅÕÊG4«”ïvRްáÅ›—jƒÀ±X›°†!žH1×k‘@¾<„M¹¶aãWÁòõ{ê‹`Acîž°Áýà? „XÌz">oÇf]dÇYÛ<}••ÏŸ«…°!ž ’å^ÁyÆ/¤ ‚Bã•ÆŽ}G™…¸Œë(ûâî•Oˆ”m¼qoˆ‹+ÜÄP¦°¼ÁúñÄ D)ÊiaìÛ¨‘ÁB½•xšÂIŠ¢‹ÄJ¨Õw·ÝvKɘ°1ׯaeùl 9ebÕ6Fza5@Ùôm,¸5‹OèYѦ^Œêƒ%þð¬7kC\¢â±J¼$~wˆGsíó.iX©@ ÆbmŽÉ OlxbÕçç¡”E”V"l|9Yû%æ>åÉYÒÖCØØ³‚˜L_,X e +¤´Øgž°É"LIÜqÇ\§×g7‰ç“ÎÉýB@!ÐEt[¦Z7¿·þðç»Ë~W6F.}w°}Žké§Z-løòŠÒ A‚ Š tC FØ$þ)EšxkOø¼ì'r aƒ?d’e“\(ü¡î´—wÚˆ¥ƒI9†´qDLY°|¶µ±[qr… £(û¸Ì`¥ÀsŸ©…°)×#lzè¡ÔªÃêi[#˜(æFÆz†ûÃa>üà?Höã?*æB’—°á9b¤d¹²ã{Ù±‘ lêÁˆúÔ3óäµ6Ç[àÂ&ËR…û˜eÏc zëïï ¬>Ì5Ó,oHë]{|^b˜ÕÄ­þç ›RdO¹zUCØ`¹Ç³“±Ï⹑ŠKÂï§Å±á¸VÂË>$vWLN~ø‡ÀÛÜ XpDË„öß½°NTŸª°ž憟μ2<1÷ÕªòµSbSÂK)éÖHä ãG'Û<XžÛ2ñ]³m5Á†-O-„ D E– pZ\~V…B²^¾1G÷¦Þ¤Å‚—ezV¾È^–ùºY‰°á ?ñ`îÑ`_üãrQØY%+¾M9ÂÆòÅåe£¼š5/êĈá«}9©…°±:eµÅ›røÒô_³‰Y‚x†zj/´Ã^ôýy¿_ް©¦¯|™Yûµ6ŒSÜJ)†vSÔ*ð{ƒ”›CÌiÈ;Ęž°ñ–7IÂÿsÈÞwDØxd´/„€­€@·%lÆôY*|½ßaùÞ ƒmæ|â¼7ÃïgÝžŸ›½´kž2Z=Mg6•p¨…¬¡ÌZòa‘Ez@Œ˜K‰w ò„_Åe%Œ^/–Xx¹EÁe[ްáë©™•s/Èš,³x+%ÿû ÁÏø0&]+GØðÒ«b_ê-Ÿm)²† Ŷä©TD¼ –OS;Ž\Ê‚ÅHÊõ„¨ ™šeíÃØÝ¿àVx—(ŽÓ Ãe¦ÖtË8bÙ€A‡=ŽV{#Öi,(7ç¬haS/FõŒA.}XŠÅBûÁçîC扻çJ‘.Þí¬³>¬ºêªÉûÇç>÷¹Ïô¸mþ¸Q„M–»”†ÄcÜêà­ØÊ6×^{mBì[>ÛÚxæ·‹X‹¿UoÐa6†¨¶B@!ÐNtKÂÆ:`‰^ƒÂF}V _ì·Uèß«¯ÎÜΜ?;\8ë¿áþ¹/„·ç/\¦83q›ŸlacÁš!e,n ûõXÓdA]+aCYÏ>ûl¢TòEzÓM7M}0áë-/’&1aÃyâmœrÊ)‰9:Ç,YL\,q ILXy 2Á‚‡aLÙc†åÂÉ‹ò€poHŸ,A!±:ÇsL²‚ ÄNVü‘r„ e³:‹¯D!¹òÊ+’ ÷‚Èñ D”Å `™ULô!¥0™·6Ä„· ¡m¸Ó<þøã©’Y©-ž°G”-âœP\j fÈ.ˆ/#¯MØÔÚWIå*ü©…°!è³­.ñ²ÞôXÑ»/¡è׺¬7Ï[®˜rÁ±íRÐÆ‘)q(ÐXç 8Fú|æb[°D8ãŒàÄŒÙ<â-X²–¿ÞvÛm‹æ'l|@RÈŽi« äbže½ï¸ãŽ„ôµ¼lã{—"”|Ûg©rÆ# œg‚;#1Ž$lêŨž1è—ÁÆuEŸy`¡…ÕIôu5ÒÍb0qëÊø™‹5!Ï-$Æ×ˆ¸RdO-1l¸VŒoÄ[%'>üÃo¤1‚u¤Í¹z æ§=' 
À­\î3|øð”ðÃýÕ[£ryÉ ~ß›ëÉAáEÃ*]Ô;–R„µ‹ôÌ{~ƒ¼0Îó,ë-ÂÆ£¦}! „€hº5aÃKØÈ>CýG…ú®Æô^*³_žŸ79\?û‘ðļW¤¹SS…33q78Ù ÂÆ–êöKtó÷A¥™Ög-„ /ðügéç,áe<^¶šN”0³°±|©åeE˜2qϱ/¶Xp††Ý+këc¸˜{RVýÉ[‰°háeØVv"óÆHŽqsA¹¶¯÷Þ&NO^Ò[LØÖêË>bŠûv­T[Œ°aÅ*S*Èç‚ì˜B¬¿Dy£ ›ZûÊ׳Ô~-„ eyÅW5–Ά`¬î¸cÁʰ@*˜€¹_ÖEk¸œ±Ò+Ïl²É&ÉR¹víå—^ ‡H/¸âOÁŠÂ—4ˆ„­·Þ:%¼¥ ó䢋.J•;ê )H¬(òá¦hD"mA¯XH9‚âJžJ©‡rŽÐþn¸! `Ê8Â5ÐÈ>+Ç6(ü^lò‘|.¸m˜ÄÖ^™$ ¸Ò&Èò²²ŽŸoÕ6Ä$By5<˜sôsì±x3+#i8¶ýFXØÔ‹Q=c¶óì°%´!cÆ—òô¯̱ë“w'„¬Á 21ø¥ÂòÑFÖ€Wg6`A=póB èÇŸ| ¿÷Ù{ï°ìrË%×bΈxŽ'‰?üÃx)tØ[J1މÃêm¶j’*ÑÊœ…¼Á k;‡\ ›‘#G&®ˆ\£~/Ì{Ê6¢ÌÊ-lˆ1Æ3ÎÊg1O°šeŒ3Ö(¢N~u-Űq‰B@´3Ýš°±ŽÚk`ÑkpX­ÏÒa¥Þ#ð^ V™2ÿ½ðܼI᩹¯…7濦ÎoÜj$vïVÜ6‹°y|îËEÖ4XÙ@ÚÔ\¸vµ6DäëV(Ö~ÊçÅw ¾ð³ï%—@âÂÄ«»@¼kÉZòšó(j¼øã:Ä= wÌ:%&lø[iåÊDliq”M³L8¿`邵I,Üß,xG9‹²†UNF]¤xR_^¨qaß Ö4¸áØK2×PPü‰kƒÕÊQçSÿ/ùË©2N>Üeò´Å¬€PB øk/ð”3}úôD±F±òâcÍ,šþªGj髼÷«•°Æ«)yñýè?0”¦,e…–¸.(‹¥% 9³\òé¼¢çÏÛ>÷g¬Ú lœgÙaï&hiýRƒØQfQiÁyIwÅW” ˜íËaß\MâóvŒ¢oJ¿'l¸Ž…ÖžX±|lSÌ_ú!&l¸¾Ë.»t°H༠ó’çR aCzâjA<”ªi !â•æIØpz1ªg 2îyö©F}báù¹áf‘I0n#µì¼mý˜è,†{3™¥êEæn³ô­I½„ ncÌq/Äd³ øÌ s¿ôil¢Ð~_c†4ç~§âg”ŸÃ¥ògŽß²rãüÞ{ï-z>O„ (H„€B è„uР^‹†¢U£^,¬ 5}þ–¤Glí…ªR€P,)<ËzgYÓ˜ÕM+XØ@ØàâƒðRq€%„BŒ®5BPœ° @¡å z,g¾ã"…/´õˆ_¡’¤\pâ¼÷8Á´Ý»]•Ë‹µmΛÞÊÂÒ R‡€•Ô;O[ô ½ËÖϬip‰BÖ,¸¡Aâx©²Tq±V #lª¸UUIyÑÅòyðÁ¥‹gË,K0iGPê})äÅœ/ÇÄ-igÉÓ–,¦ۜUwÆÌÕW_u9×9È,«(E˧jBk+X,[ 5³¬j²Ê„A)Ã’"-à"ÅAIÄ2« ibnyÊÓ H£cý+½qÚøb ¢åw¬¼ØPJ*ø#Šû‚­=#âûÔrŒ" A™¸šø|j)¯Ö<õ`Ä=냸§ó€gl^‹:Æ.VNjôk#ðZñóùXiÌÚƒe&Äg#ÇŒ¿—ßgŽA¬óüÈGŒ5ˆžÄ„«¦N=”Ï|€Ð­VlœSϙΤÚz*½B@F Ðã›ØÚã³Ó«û’ÙлºŒ¼„ ±/xqþø-Ãȹ ÜÈÊÕÝ,j,M3Sv«6¼K…ñïä“Oöeïºë.wíµ×¦Ú®¡ ™H3gÎ÷³Ÿý¬¡w§úE@D@D@D@D@D '!Ø´s­]ÏòήwY·QEO×­¬“‡òYÕR÷îÚùn^Õ"7¿r±[áVç„U*+%Ø÷J–Š`SWŠlêJNÛ‰€ˆ€ˆ€ˆ€ˆ€´$%/Øt)ëàú•ww{´ÚÂíÐj`ìµ²æ-÷àš×Üû• Ü¢ª/b˔҆lÚµkç6Úh#×¥KïóᇦƷþúë» 7ÜÐ͘1Ã-^¼8q»Æô°!_ÌСC}—™3gúÜ3‰¬èСƒ8p kÕª•{ýõ×ýö¬N+ØtëÖÍoOû%¹/Vˆ6œÓ€\×®]Ýk¯½–3Ì-)‡?˜:ü©‹`Ó»woߦ`1kÖ¬ ËÚvß¹sgíÞÿ}7wîÜœÅóñ°)++sƒ r\cÚ* ¤e" " " " " "PH%+ØÐ¡êèÚº!åë»ÃÛìè6(ï–“Û•Ÿ¹W=ãÞ¨üÐ-s+SwÊsVÚDW¦l.¾øb/ž$ÆI'äB1¦oß¾î”SNqä( mõêÕnâĉ‰¡/:ur¿ýíoýv™M×®]ëžyæwÉ%—d®G¾Ç4räH7þ|÷î»ïfêM;ÁhLp€kݺuf/[¶Ì‡M:5³<œ Tè´ÓN«ÁŽQ–Èôýï?gÒám·ÝÖ¾„àÚ´iÓÜE]äþþ÷¿×9é0×þÔSO½Fo¼ñ†ÏCÃõ -I°ÙgŸ}2¹hÎ?ÿ|7eÊ”p³Óä­a›$»õÖ[Ý 7ܵú[ßú–Ûo¿ý²®_þüç?»>ø 
«¼ÍüøÇ?v;î¸cÖv\;Â~÷»ßeÄÀM6ÙÄ×cÛE?ßyçÏË–S±¡‹gŒ‚ÍþóŸFµËö©OÒ&P’‚u¤ÈWsT›]Ý ò/“ìÖv)߬üØ]·ê G^Œ^)ZZÁæ²Ë.sx¼$bë0¯³Î:^HÀ»&Éî¸ãwýõ×g­Þ`ƒ |‡$#7 "–Ï1ýñtƒöÛ]sÍ5îî»Ó'˜FXÁK(Éhÿûßÿ²Š îüãÿpxvÄ"Ã_#êPGt”(¼‹e¬ GëøüóÏ]ÇŽ½‘ï(QÓßþö7—ëáÕôË_þÒámûNlößwôÑGûbóSO=e›Ä~r ¿þõ¯Ç®ca´} ÖtÐA‰åW¬XáŽ?þø,ï òòr?Ä89IFÎrÔ úÐ>h'I6{öl/B²žD×?úѯ ei;2¨/’l€Òµ¬£Û£õîàÖ#óbtÛê©îÁÕÓݪeymל §lË9æ˜c¼GçHrYóª±ð :É=zôðèOž<Ù†²óÎ;»Ñ£GW¾ó¥çÌ_þò÷Øcepá‰2bÄ?O}&LðÉkÙ1ï dÒ¤I^èHsLlsóÍ7gö›O"YއãÂèÜßsÏ=î…^ð3£FÊ eWÅGáËÙ wbAËÎãÔ@IDATsEÈØb‹-|‡ß±>*Ø´oßÞýóŸÿt|bxä<üðÃ>œ „…ÂV>‚ Bž9 ϱí#<â¦OŸîvÙeïb¬£^%…lõð‚ïèãé§ŸÎx¥ þ-]ºÔ/‡3‚ †ÈeÙþk_ûš¯‡uœÇøÃL¸Ø˜1c܉'žÈ*·hÑ"/~޶ÕV[¹o~ó›î•W^vgŸ}Ž£íÚ„Ù(Wp¿ôÒKý2ÄB汫®º*#ÄÝÿýnòÂ훺Ù¸¦ßûÞ÷šLD@D@D@D@D@êC d›þå=ÜwÛŽIí]cñ²ù×ÊGÜœÊOmQÉ}¦lìÄkËsØa‡¹C=Ôg)F’ °$¼60:äßþö·3«- a8„ …†wÏÙgŸíE—çŸÞ‹E¶¾¶c¢\8ìôÕW_í…Û>×ç~ð/`Ðù¾ð }Ž—°üW\‘§(‹Ç zk 2 XgŸõˆO„Ó`QÁ&ô@A( nʘá5‚‡‘‰_ù6¡· bû²œ8ÔH„ ° %ØØy¤Éasã7fŽ!%:ôwèi…õÀøêC¯*¼aæÍûÒ[Ž•ð't ÁköóŸÿÜÉæÊaƒÐÆuÇÈ¡ó‹_üÂOÛŸ=÷Ü3#ÚÀññÇ·Uú:(YÁfpywZ»ý]»²ÿÏ?’†ÐŠªÕîÜw¹™•¥)Þ,ËZ° …Ø‘9NdÄCŒ|MìC&" " " " " õ!P²‚F‰Jn…lØ‹…ÛD 'Aòļ[8à^dÂÈ›o¾™¹‰­R°¹þºë\ûjÏì®»îò>a¾ò¼0 6M9$ÊÂÖ¢Þ(þ$jùÓØ‚MÈ×F«ås®îß¿¿ÛgŸ}¼È‚pˆÅ…†¥lÂ1báqŒxÅðôf„¼Y]¶LŸ" " " " " "/’l¬“?°¼·;ªÍ®©GŠb„¨ëV=áÞ®ürd™R}K^Á¡ä­·ÞÊjg$…ehdx1¼q¡òy0¬8Ã…cÑv(ØÄSÖæ1&N 9:ãŒ3|2cª ›0é0 ÃG-¬^II‡Å)ô(²zöÛo??Ì:óIÇgeÃOFÛÚ|óÍý"­ð≹zíˆû‡dÐÔ5¤`3qâD_x,!£gŸ}Ö‘Œ¹Æy]W'Èĸhž!YÈucCЧÝï¾ûîëŽ=öX_<*.¦­CåD@D@D@D@D@D $P’‚ 'H笣k놔¯ïo³£Û ¼[xÞ5¦?¨üÌݸê÷Få‡n™[YÒ! 
ù 6§œrŠÛi§<³0ÿŒA<úè£}²Wæã†õfù¯ýkï9Ã4Ã)Ó)F øÍo~Ã"žrÑEùéð…ÿD½wj;&ê |…\2 íŒ0F€ ‡o^´h‘³Ä¶vLxR0"á]X(Ø 2Äç8ayܰ޴I' ë€@h7¬7Ö„ 2!>ù6áÐëK–,qßýîwý1úUÿ!¼a¯8q,&Z°¡-qý0F¢Š¯Íò›nºÉ3æXâ†õÞpà Ýyçç‡ê&11ÞMI¯Û´iãGûÉO~’uެ¿ôÒK3Þ0ÇsŒƒ…™ 6q^H ‹îGЪ>žëª“Oš4É6óŸŒîeÂÒ+¯¼\=ý9Yë5#" " " " " ù(YÁÆ@t)ëàú•ww{´ÚÜíÐjS[œõ9eÍÛîÁ5ÓÝû• Ü¢ªìä¶YKd&_Á&ôÁÓåþûïwÓ¦Mó£ä0<7ü+®¸"3RÞ?ü°{ùå—}®–CÆwôëçé… y[Hk⣑´uæÌ™nàÀîä“OÎŒ,õÈ#x±Â.AmÇD¹¿þõ¯®wïÞ~“8O«+úŽDžŽ ñfçwv «æD êùÓŸþäiòùpÜœ×СCÝ{ìá]Ì¢6ˆ&:6BÓC=äf̘á“ðŽ=ÚnÛç#ØpÌ AÎ(Sb̃>è…²í·ßÞ3&““%ê!RhÁ†kµÁ`À(N0Bx!Ñ4_ŽøõÔSOùrË–-óùmÈwd×!ZýÒK.É´µÞßÝpãî¹çžsxí Ðp®£G14wh7V—µ©×^{ÍMž<ÙÍž=Û‹‹äÂA Â8fDsmá8räH_—]·sÎ9Ç·ý°nM‹€ˆ€ˆ€ˆ€ˆ€ˆ@¾J^°H;×Úõ,ïìz—uqUôtÝÊ:yNŸU-uï®ïæU-ró+»nu¾üšeù|D:Øxˆ„F؈ •Œ@ëð†ålšŽ7 Yñª0;âˆ#|W›ûÄ Œ dV§9&óÎa£$OŽL…ÁD(‹3“$î5†QÁ†óGt±aÎ3}5„D‹¨`Cò­àÁemõ,\¸Ð{±Ÿ|¶ç˜13aÁê ?DHú‹XdVhÁ†z¯†‚·ýÜqÇ~èl›=ŽlYô“QŸÉ3#ÿÑé§ŸžslyøÉ5@X{á…ÂÅ. u³6æ d¡¶.î“pABôd" " " " " "P_-B°1HmÊZ9òÚ„F¾šUUkÂE%?mbÂCZ#Œ…°Û–íBÁ†yÄÂ[Ö_}f3†0—Èå—_ž%XFô!|%*r0*^:t®ñä‰ZmÇt '¸±cÇzaä‚ .pS¦L‰V‘8Oh9IBŠŽ>ž:ˆ?xÿ`¡w‡U†˜D¨W˜ˆ–uóçÏw矾G}ºã<ðf«‰úŽ?þøÌú4\¿SO=Õ_«°<¬ß~ûíêPž³}¾ p‚£uyÔQ™UaÞÎ+¾„4qìä>2q**ذ#BÒöÚk¯¬ëÀrŽåæê¡ÊÃÑÆXŽÖÆÐÞ´C«›å\?†ßæCñuX»ví܉'žèFŒ‘Ù_Tè#çÿÃvÁ¶$¥æømhv–ÉD@D@D@D@D@D >Z”`¨ÓÚs‹«–{fËÚ»sWÜ]~Ír[]òlìDñÎèØ±£jÙÂWl}v¨ai£6ò 8sçÎõâ„­Oú$DŠÐrÏ–æIچ幎‰uˆuM„ÌÐ͈ tðgÍš•ê<ìX íB°B !,CøéWJFabu=‡¸}"NØ5Â+Ïb CI†øÒ§O/ºÐ6¾¨lÒçóΜ9±baRˆ>\·Ï?ÿ¼F¶A03ÑP®B^›¤cÒr–E Å 67v<)ë ¾ì²¬ù–0SÁ¦%ðÑ9Š€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ@± H°‘`Sì6¨ý‹€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€DH°‘`iš(6 6lŠÝµ 6l"MB³" " " " " " " Å& ÁF‚M±Û ö/" " " " " " "! ÁF‚M¤IhVD@D@D@D@D@D@D Ø$ØH°)vÔþE@D@D@D@D@D@D@"$ØH°‰4 ÍŠ€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€›€ 6ÅnƒÚ¿ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ@„€ 6‘&¡Yb`#Á¦ØmPûˆ`#Á&Ò$4+" " " " " " "Pll$Ø» jÿ" " " " " " " l$ØDš„fE@D@D@D@D@D@D@ŠM@‚›b·Aí_D@D@D@D@D@D@D B@‚›H“Ьˆ€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ@± H°‘`Sì6¨ý‹€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€DH°‘`iš(6 6lŠÝµ 6l"MB³" " " " " " " Å& ÁF‚M±Û ö/" " " " " " "! 
ÁF‚M¤IhVD@D@D@D@D@D@D Ø$ØH°)vÔþE@D@D@D@D@D@D@"$ØH°‰4 ÍŠ€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€›€ 6ÅnƒÚ¿ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ@„€ 6‘&¡Yb`#Á&uÜh£Ü;ìà ䷹馛Üo¼‘z{,.qãÆ¹~ýú¹O>ùÄÝrË-Å=í½ Jåšn³Í6nÛm·õísùòåîâ‹/v+V¬p½zõr'œp‚«¬¬t&Lp ,ÈpûÁ~àZ·ní^~ùe÷ÔSOe–7ÔÄþûïïÆŽã&?ð€»ï¾ûj7«ž<¯±¿ÿýïníÚµ«»µk×Î_ëBÖW×&›lâ¾þõ¯ûU7ÞxcV[jn×6îü´LD@D@D 4 H°‘`“ªeï»ï¾îØcÍ*{à 7¸[o½5k™fš.ýë_nÝu×utˆ<òȦ{ :²ÔJášž~úénë­·Î:gÄÄ™?üánÈ!~ÝK/½ä~ÿûßgÊÝvÛm~úõ×_wÔцpô·¿ýÍªÊ}ç;ßq‹/nÈ]Ö»î_üâÁæ˜cŽqK–,©w…ª`¿ýös»ì²‹èÚ¶më`:þ|7}útÏ®ÐvÈ!‡¸Ã?ÜW{î¹çºçŸÞO7Çk[h6ªOD@D@D éhq‚ÍÚŽu‹«–û+Ò¹¬½ûÇʇ›îÕi #[o½õ|Íüqª=¬³Î:îꫯΔ]½zµûðÃÝõ×_ï^|ñÅÌòÆž8óÌ3\EE+÷ꫯ:ë¼5ö14§ý•Bç¾9ñnŒc-æ5íß¿¿.8Ï;ï¼Ó!¨äk_ûÚ×âŒÙÒ¥KݧŸ~ê„EšÍ6Û̯æYƒ€cf÷¼#’ýÙT›ð¸²øË9„›Ÿþô§^XŽ[_×e¥(Øè;°®­AÛ‰€ˆ€ˆ@ó!Т›Žem]ÿòYWgNå§nYÕʬe¥>“¯`³Ûn»¹“O>Ùc™6mš;묳š"ë°Í™3Çýìg?kÇÔ”¢˜û¦Ì¥9[1¯iø\¸ë®»Üµ×^›7Ê_ýêWn»í¶óÛ]yå•îÿû_VµfÍwùå—»Ï>û,³ÞîÿÆlØ)a3œóÕ!QÑãÌTš…‘¦âasÚi§¹#FxJ„h͘1ýõÖ[nã7v[l±…kÕª•_‡hwÜqÇ”f’`ÃNšÛµ50vè;ЈèSD@D@J@‹lº”up=ËÖqƒ+ú¸MÊ{»neü•ü¬j©{§rž›¹ö#7¿j‰[TõEé]á˜3ÊW°ùáèöØc_ÓŸþô'7uêÔ˜Z‘~¬æÇ¼˜ûüŽT¥Ó(æ5-„`ƒÃóˆ˜Ã;,¯<+vÿ7–`“öš4•rMM°)//w7ß|³+++sxižzê©î½÷ÞËàÚ`ƒ Ü…^˜m,,.S ž¹›zV]´Íí`S´K ‹€ˆ€ˆ@ƒ(iÁ†N@ïŠ.n³ò¾îk­·t—÷Š:«ò7iõ«îõʹnÞÚEþelÁY˜¯`ƒw 3 ñ†¤µ¹¬wïÞŽÅäL˜5kVj×v~Ðoºé¦®cÇŽîµ×^s+Wæö|jì«$8>|¸[¸p¡?/ÚWÃK`À€¾“2{öl¿}šíÒ–Yýõ}ýsçÎõ ¤ã*Dç¾[·nþm8á*$œ.d"SÎcà 7ôoÝÓæ¡ÍÐÑãÿ;ï¼ãæÍ›—›ëܹ³çÖ¡CÏí£>òÂAÚ èx8ÐuïÞÝ·‡Úî‹°ÞºÞ#aLÇ]ÓŠŠ ŸœPF¼h¯ a…lH†Û³gOoX~‘´Çj÷m‚ ×™ëÄõâyÔP<¢ÇM(Ï Ž¢P–ö~/„`SÈûÈ–ä‘õ£ýÈí¾ûî¾ ‰ÑIœ¯Á|@õó–ûòÍ7ßÌxeJ°¡~r.‘›pܨ‘D™ï¿.]ºøç áÃi¶Ê¶<×ÈçS[»±{ ­`Sßï¢ÚÎ=íyªœˆ€ˆ€ˆ@z%-Øt-ëè¶­ØÈÙf×®¬uN*+ªV»ëW=é^Xû®[Xµ,gÙæ¾2­`c©¤ó%¿D˜·â[ßú–#™$?êBCHøóŸÿì>øàƒpqfšÑLN:é$G34›I“&¹k®¹&³˜‘>¨+Éè´óæÖŒ$»Œ@óØcå%.ü÷¿ÿu$ÃäÇö_þòGèu…Æè4K’°Dbßo|㾞p;òtð¦9ß‘fð@8ôÐC}Ugžy¦ÏíA'ÄÂX€3:òQ‹ëÜ[:7œ' ŒÀ8F3:[„(СNÅùçŸï®¸â ¿-¢¢^ZëÔ©“ûíoëȇ‚Ð`Æy<óÌ3î’K.‰Q(Ëu¦íÐ7c;DÚf’x³ÓN;¹ï~÷»5®'a7O>ù¤»ì²Ë¬ºÌçyççÅDê§ÿä'?qÛo¿}Ö¾—-[æ¹ÓÖ’¬®÷HR}á5=ñÄ}hàæ›ožUüóÏ?÷çD{ §œrŠ_ôïÿ;¶=†¢ ÏÂH¸»Ï>û„UeM“ˆœ„乌2áu Ë"8Ž?Þ/b?ì£M™2ÅOóÇ:«I‚ möûßÿ~ëŒHøU¾#Kò…˜LXõš…mãÛßþ¶c¾oß¾YçÇ}ñ»ßýÎ¥ÍFÝõ½ßs 6°Ç›…ûCØ \ɬ!îw®#9‹0ž§ï¾û®í.ó9jÔ(÷ãÿØÏs­¹æis¢Þè3q™û¤G±I‡©?͵åþúãÿèºvíê‰g=YÀ5ç~2¦¾PõD—‰'æ 
„÷÷¾÷½Lݶíò/¾p<ø`½¾©«.ßEa»N:w»9Î#:Ê;ë“—0Ô…ñ"†ï.™ˆ€ˆ€ˆ@z%)Øðí¢ªÚ[£b=÷ö»»^e]Rù¤j±ûÛÊÝ[k?vkË*c;Š©*jâ…Ò 6ÿüç?oX“,iƒŽèA”TÔ¿<þøãkŒTBnDž\F¡XØàÁƒýæ¤òx°X'o‹.ºÈwœèL}ôÑI›ÕXΛ]„':wíÛ·¯!BÙ:ˆMQKs^ÖŽn›42æ‡o´cnwÇwø¤Ðá²°sŽÅ5F¤0±!‰¼£ô €$u°ñX@xW>¹'ðŠAðBK²hçØÊ]pÁþM´ÍG?éLQ·cëy3þ›ßü&ñ\(¶7ÛÎö‡˜ðþûï{O [~²žOYQ ¯_tó¼±»GâÊÚ2»¦ˆ†«V­ª!zZ9x 2…oû¥‡ä®¢èÝwßmÅ3ŸcÇŽõÃj³àÿø‡+lxäLÁ`"®í«ý¤uô¢Ë™r‹Ø}˽Š,¹ò¡p’Ú,û@@!'WZ#É:Ï0?³°m0/†8CDÐá:§±°½Ôå~Ol`Â1¨öBÁxfRÖFfj¨ûÝﬖ?\k®9†ˆ‚'NZ#ïðá[%ÇÛ ñ ¿»˜Osmù.ÀkÆ,lxÙÀóÜž¡V&üLº/¶ÜrK/b$µUê˜9s¦OÄÍ5Êç;mëú]¶ë¤s·2ì&Q ÛaôŽ–Õ¼ˆ€ˆ€ˆ@M%)Øpš=Ë;»o´ÚÊíÝzxͳαä«_q×¼ìæW.ÎQªy¯J+ØP¯B† æOúâ‹/örføqÆÛÃð­(?bñ0 cÅGÞ¨ò‰Ñ™ÇûÂ:t~®»îºL§êÑGõ^¸ƒÓQ$9¥ýˆÅ»ƒíY7hÐ _ŸÃÛëK/½Ô/£ƒo¡)ä@°7º¬$11®ãiÌ+‹wÂÃ?ìß=Úí½÷Þ™cãMõÓO?mE½÷yÐ1äMñã?îÌÃʆ0folÓŽ´vàØÛs\<òˆç2fÌè¬K4ßuîÃa½y[ŒXõÀn¿ýv÷ŸÿüÇOóÏòŒ˜×ÞRìp(:ü»UŸOûê"³|›0)’ &xï…wÞÙwÒ0 láv°Å3‹c"d£m†a6„ÀÄ„ÔÐI©«‡ ûB @L ë¬Ò9æÍ¡™ñ`žýó£>´#Ž8Â|ðÁ~?Ìq…OcÑa'x*„ÆüqãÆùEQOëÜ›`ƒ7±æ+Á%Ž1á ü(Çèˆ[¸‚_Pý‡ëæ#ØXØYÜyÚËÙgŸí;2xÉ a\Sçâ¼F(ÃzÊaa{ ; ÑkFYrŽœsÎ9¾½Ð–隸nË::¹tvC£ X8DÔKÇÚD¾÷HXÜ´]SÖÁƒ \3:wxÊY8_˜;¤®‚Õ†K…õÚú4Ÿ\Ÿ\9lê"Ø Ê‘³‹ o, …\O®yK#Ø þ>bí†zÃ0Î$±¸ý×÷~lð:ãx°¸ð”†¼ßãÎ1\vúé§gÄlîyD•´Fø¦‰QQ„: a4«‹`˾ y^…†®‘DÙ¼Ö¬ÌÈ‘#Ý/ùK?‹èÁ3Ú,|®Ä3‚ž`Xt[–ÕöhÏÊæû][Ò¹S/᳈Jö½Â2³0ê¹çžË„FÙz}Š€ˆ€ˆ€ÔN d›Í*Öw¿iw€kå*j§”XãÖº?¬¸Ó½¾öÃ`iiMR°!§ÉUW]åÅýàdzrF`IoáüÊÈB 0D ÂrB«íÇ*e9>\ÎÙo؉ 뉛¶ºÑŽ»•E   F.†Ÿÿüç~:ù^BìÅã̓-S z‚m e‚k([;ä\y®±>jˆûC‡õ^hQ+×w`}¿‹Âvtîkèñ ‹â{qÇwô§Ät\Þ¢èùj^D@D@D ›@É 6C+úº3Û%çTÉÆ=wöŠÛÝŒµs³–Ð\!Fô sŒÑD‰3 "\€Nqœá¦ß§O"A'—P i¢ãlomÛ\?V­L]?M°!Œ€ÎCœÙ[Ý0Íž{îéóP>ÎcÅê!ÏFXÚ²aŽä¯x'Äosy«‹…u[çžФ£°Nã÷v×Ê…âU>‚MØÑ¡£B ÞHˆ,tŠãŒ„•ŒÔ…Y˜\\¹è2\ò-Ô+š`5,KòO:ºBv^8:†qfŒ ñ±BÞ#Ñ}ÚþXN§—ëgvÉEqì±Çú"¥(ØNÃ}‡Ñ ¶i¿ ø+<2hwiÓ4‚MR‡Ý„¼ ÂDµÁ!Õ˜¬ïý 6x!š÷!=xÚÄ™µ“†¸ßãöÇ2žñx=aðáù•OÇ> û‰ó:ôWÿAì°pÚ|Ú Þãž–0?Ìýeû哜OæeŠˆ˜xD›ÂðÖ#ar¾–ë;°¾ßEöÌËuîoèA}!c÷L®ïÑ|ÏYåE@D@D ¥(YÁfpE÷‹¶ûºNe_Žz“öÂ.­ZáÎ_y¯›¹ö£´›4»r…lpÿ¦ã—Ö¢o˜ñ†øõ¯íó˜K{\]ÅlBO‰èqÅ 6¡ƒ‹?#ØÄÙ¾ûî›é8çêd„Û†¸8·+›ä…vî­,ŸxÚ˜J¸œëa#E%…Yyëèå#Ø0œ6Y ×±ºøDX ×ûÿ¢z3;‡|:¾lŽˆÄ(Zx£Ä"–Jv¢¬óÂ6t¸ÙœHÒVŒP7<8ê{ÄíÇ–P ²uágœGW) 6aøIxþ¹¦ñTÃc­6³Îg’‡M.ñ‡Ü_ä+ɧÝÖ÷~›ðÜ’F_jèû=<›æ¾Ä»øÅ…ÔZÙ¤ÏЋ#×37L–]Á&NØã¸É-ÃgZ³‚ õc…7ì ŽwU´ãCøÒÉÕytÈý%…{ù•9þ¤l‘Àê"Ø„^C&ØÔçÉqJ~•]S 
6§{a'˜t ãŒç‘=¯ðèhê‚M]î÷°ÝE$ ãÞÐ÷{x$²ÇÛÄîyšzè¡°HªéP°ÉõÌå~$YqÓá5M†&„6Im5ôÖŠÞßM1ép(¸½òÊ+^ŽãV—eÅô°©Ëý 6ŒpÇ}İñ÷3÷ÏÝÐë~'ü5±-I@ -i: ‰JjÇlkyd˜.”`C]–Ÿ(W墆ïÍš5+Š-—k>—`Sßï¢|›0O—…EÙý† ç:­ˆ'P²‚ §; ¼§;¸ÍH·]ÅÆñgYúÜÚYî¶USÝìÊù‘5¥5[HÁ†a`-¼$Wn„8‚ü¸æG+Ĉh²Æ0?—`7zQÜþòYf!>ù†DÕ7Ñc®c ›\ç|ÅWø!fáºñ[çÎäÚ@!T†Qz°¸N·¤¡pbÇI™ëoÉ<ë#ØX|†‰ríxYÎX£“ð¶:j„5ñF—·ßäıaÓ­½Õ7éðĉ,£¦~þüù™Îq}î‘è>¢óvMYT¬lÈ’:!ˆX8 z‰Ë¶ã3ô^ˆÖ 6ILºâ¦b”(»ÎÑÐ¥¸ýç³Ì: ÑzÓtlëU—û=lÈÃ…XŽÅ}…·#ËÍã~GÆcA ½í8òùäûÌÚnRÒaÛ=§ )ØP¹qxÞ~ó›ß¬1ŠT®s±6ŵ ép\b|¾IjLží¡™`×>êû]”¦]‡Çbyšÿñœ²£Ô„‚ õÁÏ~°[ýG}´#$ ‹Ž’c{~PÛHK¼¥#n§èæ ç“å"Lˆ| 3fÌðÃ}ã=Brh³´‚ Þðc*ì#jž¾¹;g´†~§³uY‚UFsbT'Ì: LsM¸6¡å3¬7a4$I-ÜotÄ´ºÞ#aýqÓvMYGX À,X)škXï0œ#ÚV¨á‹Î–]û¨`†™…#¥evžb¢!›ð:Äåΰg í–üH–½¶ÃµÎu1Ž-ßû=N°¡ëX3¤ê~g_ùªWðxøǹ7êkö¬ ž¸á«Cá‘2…lÂgnœèÍþIñÄ8{f…CÐÇy„ñL²ÑÃç ¯¨ú=ÿ“¾ëó]dÏKž³ö}eûû = ß~ûm7pà@ï‘yøá‡Ç Qquh™ˆ€ˆ€ˆ@M%-ØpºíËÚ¸¾ÕùlFV{ÙìÐjS׫¬s…Oª»)kÞrÏ®yÇ}XµÐ-¯ŠÏÁ’µQ3Ÿ)´`z= òÔSOùñ²eË|œ>±íÖ飃†{?FþšÍ7ßÜO3zÉyCJ˜ÎAä‡äö+«ÿÄ 6áQ¶›óÌ3îÑGõ¼!ÂX‡›¾y€ø…9þD¶g˜YDøŽ=:.D5^xaÖ¨]ֹʅBót¨ð¸0«-‘+õ‘ŸŽpZÁ±€dË–¿7z:×x€ðCŸD°6ªJTD ;¢¼u&—|Ùn×]wõŸ;ëð$2‹†a0R Ûâ 3jÔ(¿­O´£o«‹Þ;$FFb$(‰â„“ºÞ#¶¿¤O»¦¶ï¡GªYÀ“¡€ {ÃÃã~$ñ*\Ì®½öÚÌÐîxP!J Þl½õÖÞ[8³¨`§L[Dc¿ˆ²¡øjÛÇ}6„`è5t„-¤ŽŽ0ÇFŽ&Ú:£ýØi/¿ü²;çœsâ­Æ²b 6ùÞïá}b6œ^q™vm§NêyÙ 7ÄýNÝx·àÑg#*Ñï¿ÿ~ÛmO„GîÃ4ƳëŠÁ‰vÈ3ˆç¢}ÏX]…làˆç‹ß®üÜ}^¯æÓª%®ªú_K°B 60cXVbæsYÔë€ ¼m¬cÝ–­v¬q‚Ígœá¶Új«¬ÍlpëÇÂDˆÌ`o6™ÏeöC¸.‚ õòÖžÜ ¹,ÚÎU–u¡`Ã{FáH2’ZÚhGVÆ:÷QÁ†õa®~ Ã0LB̵%É:yV'eðž¢ÓÁ[ó´‚ Û#¦|ðÁVUì'áKè5BAÜíé˜'žŒˆBž=Im޲Ï=÷œ?§p»°óB»$ArœÁ!’öµºÜ#Ñ:¢óvMéü’(ôv ˃Ѫ¢Éu£IzÃm˜&Ù³%þŽk¯WCÛ¶x³!n¤±†lØ/¢¹œr]g¼Ûȃ X¹Ž¹˜‚M]î÷$Á†sŒŠZW]u•»÷Þ{3§ß÷;^}ÃÐ bøñAd@X ÑoZ3ÁFœœY˜K\þÊã*wQôG<ñšˆŠ¬Ç)h«ÑáÄa‡'C¦G-ì¼Ðñä˜iwá¾Â! 
½“¢õÔå‰Ö·×ôGÕí&x‘„†Ç á…Iž\„2ж,¿ÛÂ/…—^zÉßc, ½ã˜Çw$‘-ac‘`cÃ#š„ÞP_Öî\è!pþùçg p‚§”yÒÙv|â@.£è³…ëŒg^&iÅ껾zô°öÕÞ)ˆXx*™™x³¤Ž­…i±o†…Ocõ½ßÃ|4Gu”¿gÃý†Ii9vžsóæÍ ‹xQ¸P÷{x>Y;I˜‰–ŠúÅ´?Be­-²á‡k¨bmŒ0#Ú¶Y}®­Õ‡áNæhËaK)ßEQ¾V‘ïÖè3‰ö‰‡!÷yœÕöhÛÔå»(M»¶úísøðáÎì³)˜›¦pZÖ1è~oY×»¹ž-¡‡Ã† ó!¢xÙaÑ|uÍõÜtÜ" " "дÁ†p¨Ð$Ø8?EÈDÓM›@±:p„*‘ß&šg&¤…øG’Ó4áoávÍeZ‚Ms¹R¥sœºßKçZ–ò™›Ç^qžxÕà_Þ5¥|Õun" " I E6Qï·Ôp(ÎÝ~\ÉÃÍÇH°kC×2*SRˆ8#ÜÝ;ì0Ÿã„üDí«>^ë„ä¶™4iRCì¶ÉÔI¢Øm¶Ù¦ZZV=òIºa ›ÌÁë@š%ÝïÍò²µ¸ƒFÌ'—yÇÞ~ûmŸ\9 ›mq@tÂ" " "P`-R°iÉÞ5´ 6¾‹Tˆ€ˆ€ˆ€ˆ€ˆ€ˆ€˜@É 6qÞ5lÖóÍH6¾›Tˆ€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ@É 6ÑÜ53ÖÎug¯¸½@øšg5ò°iž×MG-" " " " " "Ðr”´`Ã0Þ çb ¢MK6 6-ùêëÜE@D@D@D@D@D@š’liÆ·Þ¾†XÓÒC¡¬AJ°1ú¦I ä›¸œ5 —Xóÿ P‚Íÿ³Ð”ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€4E%!ØXØSœW Ð%Öd7= 6Ù<4'" " " " " " M@³l’BŸBÈkB_NK°©ÉDKD@D@D@D@D@D@D )hV‚Mmž4!X ߺúÙŸ`8dbÓlŒ„>E@D@D@D@D@D@D ih‚MOš¯¼jB5§%ØÔd¢%" " " " " " "Д4YÁ&_‘F5雕›ô¬TRD@D@D@D@D@D@ŠA I 6I#=E!Ò` }Š’É=/Á&7­bhR‚ ^5g¶;('ó¤¡ 697ÐÊ$ØÔ@¢" " " " " " "ФU°Y½`–û䎓=\^5i Ûf$Ø–§jB(ª`óé}§¹•MsIb 5ò¤)ìe—`SXžªMD@D@D@D@D@D@ M h‚Ío=ä¾xóA·ß‚n|ë‘5ÎK#=Õ@R°l †R‰€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ@ƒ(Š`cb gtÍâÑ5NLbM $] Á¦ 8U™ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€œ@Q›ESþéÈ_sàÊî€êÿ¡I¬ i4Ì´›†áªZE@D@D@D@D@D@D P]°éuà_‚ õ®‘XS¨Ëš» 6¹ùh­ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€›@£ 6]wû©Ï]ç]sø²ËŠÍ£Eì_‚M‹¸Ì:IfL Ñ›¶}†y\ò®)^«‘`S<öÚ³ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€¤!PÁFÞ5i.MÕ‘`ÓplU³ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€‚@“l”»¦—2}lÒ³RI(¢6§}±•²fÝÌùž½üv7£rnf^ K@‚MÃòmªµ÷éÓÇm°Áþð^xáWYY™9Ôm·ÝÖ•——»yóæ¹÷Þ{/³\-›@EE…o'UUUѵkWß»wïîë›>}ºûôÓO RwK¬$×=ÝTyl³Í6Žv}Ö°là 7t½{÷víÛ·w‹-r/¾ø¢? 
ÎsذanáÂ…î¹çž+話¯-¶ØÂ×ùæ›oúýtªLD@D@D@êA (‚M4’ ×ã ÖaÓ|›Ö­[»ÝwßÝï…NÛƒ>èÖ®]›z¯»îº«ëر£/ÿòË/»?þ8õ¶*XX;ï¼sF°¹ãŽ;ܪU«2;8ì°ÃüôüùóÝÃ?œY®‰æI`uÖq\oD¹É“'gN¢sçÎn§vª±žMÝjc>pà@Çìé§Ÿv‹/nꧤãH Ðh‚ ?úËÊÊ\뻫:øÃéRÖÁµrî¾Õ/%ž7|~@7.s¯¿þº{õÕW3ó¹&x‹¾Ç{dŠ ØÐa‘‡@®Î›â\“p¯ˆ]ºtq+V¬pS¦L Wå=M”Nâê­·ÞšÙO‚Í7ß¼Ær+@'žÕqF}=ôoâÖ'-kÓ¦;ðÀ3«©gÉ’%þYòÑGe–—âbÆ.»ìâO ¡¡"¹îé†f‰˜7fÌßfm_ï¿ÿ¾l>úiíå<òˆûä“O|‘ÝvÛÍÙ÷ ¾øâ ÷ù矻'žx¯›©S§TôÚsÏ=]·nݼ0yóÍ7ûý5Õ?i™#öïßߟFȹ©ž—ŽKD@D@D ™@£ 6e宼Cw×®}WçÊ[ù#jµ¶ÒU­\ê–,ûйªêé¹Ý'Ÿ®Ö@À~§ñv‰ 6xeà‘Æø1ß«W¯LQ 6E™ÈÕ¹“`S”K’µÓý÷ßßµk×.QLÉ*\ËÌèÑ£}h ¢Èĉ3¥ížŒ.§b ¡qfŸ}ö™ïPwèÐÁ?3]0¼vî¾ûnïucekû¤óh^&„Â<ú裵mR2ë{öìéÆŽëχ›—^*Ü Š\÷tCìׯŸÛ~ûí}hS¸ŸÚ|„|¾ëCqÄÚ>uѶ–/_VëÛß–[nÙ !Q|°kÕª•÷»÷Þ{³öÛ”fòa>bÄ·É&›øÃÇÃŽP2™ˆ€ˆ€ˆ@ó$Ð(‚MEÇ®u×þ®Ý†#]›^ƒ]E§žžÖÚ¥óݪOfºïMukÎqk–)—Ac4£ú6ßc=Vkh?€y+¾©—`ÓW7y¹:wl’¹5Öë´F½bê²¼â[çÌ™“å­sÀø0§Ù³g»gŸ}6«ê}÷Ý7¾ ;!¿!+ÞøÊ+¯¸7Þx#kû\3Ûm·Ûxã}‘'Ÿ|ÒÍÛrr–‘·g¯½öòçNøa`…²\÷t¡ö­'Ü'ë–.]ê:uêä‹Õ&ؘ8Â6÷Ýw_¦êC=ÔWàYsÏ=÷d–7ôí™vª†÷NS´|™oµÕV>¬‘s3¼e" " " Í“@ƒ 6­º¬ïÚo<ÊuÚbWÞvXJ•+—¸¥ÓïrËg=îÖ,ªö¶‘5(ú 6¼yàrãðáÃÝ!C²ÊÔ&Øðã™Î ù2p‡_½zuÖöµÍNÂÜì + 'ëØIN£opòqÓOä¢ 7Þ $Ä “öÆm“Ï2ê¥~«»!¼ÍÂý •Ãδ/òOàIµúpD€ tö±`Á‚FõÈ#}c¼­Ž¶¯èyÚ<<ð2£-×vÌ…l¬Œ(ƒ8ƒÁþCñŸ„\!昑pzüøñ~÷Þ¤I“lUæ“óÀCKó ÈlX=7Æ€ü"¼ÒæÁÉuOS×…û†ü&Ü“tLó½wð€a›¸ërÝÓì;ÁŽç.";ù[j{ ·m+Âr 6!‡¨8bí5ßv•æür•±ÐAÊ<þøã.Wxm¶Â½Ï3a5ãþÁK}ä»m¾Ì {´DÊwÞyg^Þpùœ“ÊŠ€ˆ€ˆ€4<lð¤é0hO×y›#RÉâÿë¾xó‡ç¬áÔW°á‡ú]wÝ•óG ù*,„ÂÎ$I°!×INé܆F¢Ä§žzªFÂÄð‡?I.9ž6Ú(³=I/mt:Z$>fä‘ÐÛ‡N T ÷ÀÕ‹óâ˜HÒJ"Öp{öIŽ ùüøH“à’†ðÜ‚8/;‡É~ûíç9Whšuü6ø±n–«sg‚|’[xÇKØ É©U0–ÝrË-¶k~uåû‘#Gúp!«þt|xCN[‹¾·N`®;oê¢Ã5FÕ"LˆP¥Ð`ËõIQ‹pDKK”jÛÒÙ£M‡##!”„×ßÊò™ÖÛ†Ñu¬Ó±¤ó±?kŸœƒíÁÒGÚ!£FòÛà9ÃèMQ£ýÃc{BWj³Ðk'®¬u’ó¹§©‡ðrþX{³º9W®Iœ§DôÞcžûgF›BHÀ»†;nXݬ§b&ì¶õIŸ´MËßÃ3çí·ßÎ!›ï§IDATS¾Ïƒ\÷4× Ï-Œ6Gþ!3®9¹uÅBCô"Q-¹ŒzôèṄáKÜ3´+r£ðŒFÈL#ØDÅŽgs’!êYˆÒèÑ_†øq ìXøÎÀ“Kj¶eh×L¡ñ̲säyEýѶˆ7‚#mÂŒr<+i¿vÙºðšÂˆûFöýÁË„©|,_æ!ÎËîõ|ö©²" " " MƒÀÿÿÿ,u^[@IDATì¼\EõÇ'½B„„–Ð{GzGÞD¬Š {Eü#"Ä^°`EDE¥JGŠ = C @€ÒHoÿýN8ËÙÉÜ»·l{ïóù¼·»÷ÎÌùM¹s~sæL¯õ×_¹k‚ôêÓß Xg·Ú¾Ÿs½ú Ìô„åK¸Woþ®[øüýnù’E™âX ü¬µÖZ>Ò‹/¾X7ò AƒÜ‘GéýüòËnÍ5×ôßÿ÷¿ÿ¹{ï½7ôèÑnï½÷ö÷¦M›æÖXc ÿýÁt?þxMœ­·ÞÚm±Å5×ôÅ‹»+¯¼Ò-ZôF{XuÕUÝ!‡âƒÍ;× 2DGqO>ù¤»ÿþûýµ<Ð9²æ¾þñÊ+¯¸ÕW_Ý_ºí¶ÛÜ /¼ o»ƒ:È1¢æšþ1cÆ 
wÝu×éK©ßuy5ž±H>ú¨?~|Í-ê‚:—¿ýío5÷äÇÑGí à1»üòËå²Ûc=Ü:ë¬ãs]cz 'øëÔ×M7ÝT“öE°Y¾|¹[¸p¡8ð~¾lÙ2wÙe—U£KØê…àKŽÃ† s|°ëÕ«WcÅÏyóæ¹þýû»¾}ûºùóç»þóŸÕpR&p¾ù曫×õ C_¸õÖ[õ-ßÖ÷ÝwßÄgSî[n¹Å‘¾–]vÙÅ;V_ªùN¼ûî»Ïчã?ÞõîÝ»&ŒüXºt©ûË_þ"??÷ÜsO·öÚk'ÞO»Aú<§žPoyË[|°¬íäˆ#ŽpƒNLZú\ž>Íø²×^{%Ö {þùçÝí·ß^ó\Ý÷fΜé†^s_~P/cÆŒñýL®éOÚúW\‘ 3âÑvßö¶·ù$îºë.÷Ì3ÏT“Óy*2$õižùæ7¿¹:vÑ¿®¿þzGÛCúõëç¨>cBÿel]e•U|œ?ÿùÏÕ`;_%­ÕV[Í?‹S¦LqwÞyg5¬þrÀø±–xŒ ´WòŸ$ômpFößÿê{äÒK/­F9üðëãÿc=æzè¡ê=ýZ°`$ÏèØcõègé¶øê«¯z “Ɵظ¥ë4ö~¢H7äÅ|à 7t;í´“„Æ+Ï3-¬!`†€!`t½šEØôYu´[u»ÜàMÌUÒyOÜàf?x©[:{j®x8;E È–qãÆyyÉ’%î¯ýkô¡¢œ39¾ãŽ;Ê$6•¶çvÝuWaòäÉ~ÂÏ„™ 'Ÿ“iH™lë µPù÷ÒK/9”n”v&ѳgÏv;ï¼³Û`ƒ |ÒG™@Y‚ðXo½õܨQ£$ºÿåQ.î¶Ûn>¿!7Pä¦NêÀo£6ò8pïÙgŸuÿùÏøZWôdžÀ”‰|Sv”‚±E_ê‡û((Ÿ"HØHÞÀè¹çž«¢˜Å1T*Q~ž~úiŸ>õGûÑ g# H@HÁ>}úøâQïÔB¡ "´««¯¾Ú+·ü&?(÷ ‰G›¥ÍH{+!Ó  œ 9‰O}º»ñÆ«ã'÷=ôPGÝ ´EÚ ã#mhl¥ý@Ôˆ'MØÈuùÌJØ„ä}KÈpò5gÎ÷ßÿþ×'Mý¼öÚkþ{aC?=ì°Ã|\òyÕUWù¶@ZÇsŒïO\‡TûóQGåÓ×cx¬-ÒNÁˆ÷JØÎ&Mšäî¹çÃ…uJf!€qž¼0.f!H« F¾ÔÃb~÷Ýw÷cIÂ7ò»d†€!`‚@Ó›~klâVÛïó®o…¸É#K*DÍ«7Ç-žöDžh6B0¬'Ú†‰¦XÄ x1ÑÕ‚e“`&Ì( (ƒ¬¬"!aóÖ·¾Õ+WLb!KPе Tˆâ€5X$„ê»ï¾ÛO¦u\¾³ªM>H?T”¸Ïª%J´ˆ&lx.ÏGPš!ŒPnE˜ì³º+ =äáêI8™Ÿ0a‚{ä‘Gj¢m»í¶n³Í6ó×PZþñTïw*aƒbuíµ×®¤ˆ”Áq‡vpo¼±/;d «íÔ¥Êu$Ê{# MN„Š?Ï×m‡vKûB6ß|s·Í6ÛøïÔ+õ«åMoz“·ÞàDʲý†vE+£d‰…U¨H¢Àb^—ç§}ÒO(3¶É´xrO[Ñ— X´díÓBW+Ù’Ör¢üÓV°ä6£ûd í‰OM~P䓲Šh«”nH›²¢óDZyÇg±šÓm7f]¦-Obcù€¬¤Nð+KØ$‘#þ•2Vcy‚J(I„ á6Ùd·ýöÛû(>ô+MÖ‡–7’ö¦›nê¶Ûn;ÿ“…Èf$l‹XaBxiØÒÖwÚJM×)í²¢§‘R°iä³,-CÀ0 CÀh/M#lŒÞÚ­~è7±ÏWÂÊäð•«¿äN­Utò%b¡Ó(CØ PwÜqžae%]‹ž(³]„ kŒ°ØA±DbÛ¸®'ùÚÔ^O¨QüP¬BAq“ç2gB!@¸§ =™O"„´rÎ6òXOôdžIü5×\"[IBeIòÛI[¢(„EH¸q½ ŽZñd«„L(šHh$a#Öiäý ÜŠÑá†`õEÈ* l (ÅšCªZ\G´rš%oŒXÿùI™Âm`õÒÉCØ$õiž!õ’ÖþÃ1ˆz@t߃ü ·tb• [tbD‡¶žŠm…ñÉùOç©Èx6Ô“lÕD¢Î–&B“,…h×`-u^–°Ñ¤Jl¬,CØP6Mä±%–q¼§µ¥ýöÛÏ[1ÆB¸` ƒè÷ ï/ˆ0>CÑ–ƒl§”]§Xåðþh´aÓhD-=CÀ0 C sh.asØ9…JþÊU§aS¹l‘Ê6Xɰ²(Û‰ÂÕrQb±Fùûßÿî}q¢-lØÂµ fh©#%A¹@då”ïzB[e'Œž4këîiѾ?4aƒâ#Û^ ©ôJ¼Ä'b¡“ÕjAç+¦4JÚZû׿þU5çïDÂ…G[2Hø,ƒ£X„`´ý‹Â!"l°šÂú ¡ÞC+£òÕyˆMªá¯²MD¶‰a‘ÁÖÂ&I#üEa€àãHÚ-}~Ÿ}öñ×µ5€¿ò¿5(ÃbÅÄ#,ŽòJÂ&©OC"0¾ 1«É“&^° dÜAtß ·r_“¼1«*±Р¦Èx Ç í—'¤ÖäFše ´GݾÁ ”,ä¼3HK“#’–àZĆ4Åâ‘kƒGæBeé¼éîÕÎq‹^®ï!W¸Š@Y¿Lú­\é ,¾&NœX£iÂF¯NV3–ò…N&úˆžP'%š(ЄGøˆ-·ÜÒû¦àº&lÄÂ% Ÿô;«§'óI«Û˜î{X ‰å¤¡ 
MôÈ}>…XÈÚßuÜØw§"ã<úi„¡õÚ™X¸Ñn“ú7ÏÌBÈ3Ã>*y\‹6¤ƒ_+Þ+"zÜ”kò©ÛHˆ•~¿$Y’޶Õ¢®ÓX;#®E|O<ô6\. æ:¼}7 CÀ0 ®‹@Ó›¾Ã×q«ît¢4vÅ;+Dó'ßéfß{‘[2sÅ~ò¬ñ,\vÊ6 8¢ 8áE´ï„$ÂFO˜‰ÏŠyVÑê¤I9N!1;G´Pø },¬&lÄ„ŸðzkI?ïoMØdÝÓJVôQ¢Ò¶ Éê­>‰ˆ¸šœ¥\GÄÒ$ëqÍÄ‘­i ]eU>­¬Z Wï¥Lzõ›|‹èrma£~}]âåýħCaýÂw‘p;aQÂ\¶WÈÖ%ž!>9ø»ŽåR胊°l+¡_H^uæ~iaÓÈ-Q1Ë=VµÃ¦Èx û4c!>|úåM7Ýä·é:Ó>~b[ÃK{Ár­>lôX’#’¯²„ }–þ£·D‘6D*˜„Âö@y†¤•~¿¤Y÷ibW·=ÆÇÚY˜—"¿°)‚šÅ1 CÀ0º&M#l€£ï°1nèVǸ!›’ ¹^ã^›x¹[2ë…Lá-P1d¢Û–¦ž%þ ˜ÈãO‚ 2Ž1eg…A Ä!1¢• ma£Ó MÒ}Ä”zBDØ?q¾š¤¸èíPæ´Y’ ¦ ‘“³Âëú ^('6ep”-”­ˆÓaQ“j…+$fÄSH ÎE?5ž$„MšåKÚsQ²Å±°VÄiâ“'É9´¤K»Á²†íPH^ÇÉ’NøÙ†4¥^Ò,Ì4!¡fÝ÷ôuÉ««´.÷ù”6ÕŒ-QEÆMØ «±†ìÄ2QŸF¦-QÀ‘¾úJáètq^œ4Ö.õȽ=5$G$ Áµè–(meF¿’±›r³µˆ2h28æT\¿_°pIr:Œÿ9 ^÷«zíLç£è÷z˜M×ↀ!`†@ç!ÐTÂÆõêíúXÏ Ùâ7d³ƒSK?÷±¹¹TŽYQ9&zùŠÓR#ØÍÂ4‚°ááÚ b”½P9ÖJ&lˆ/¾Gˆ YÂJ¾ÌöQH+ ”,DO¨“ÂiE‰7+Øl‚,Ââç¤Z4a3|øpwðÁ+Ú,Ê Î+õ¿ÄÃÊHáÏ­Ó–ïz2ϵPiçZÚ±ÞZA‹­VãȇÎH'6epÔGgCœä=Ö[¬‘h_úƒÐÖU!aCÝ“w$æ»ò +”^9z‡¿ÄƒtÁºAo“"-}BOè [òK8í‡ßY„~s,¬ð²J[Þ„éŠÕד,“Â8Y~k!´,"~Ö>­ó³œc¼¡3¨wí [÷½N$lÀ¡Ìx „ éÈöT¾‡c2Ä„°XPÑ·°pÄ16mˆãËÅñÁ1iË#÷ë‘r̼8¢'N(eÆr±*Uø:’„°h º^xa¥­¬º-’ÏX?Èz¬w¬…e/ò»æEÒ´8†€!`†€!Й4—°¡Ì½úT,mÖrFoë­¿«ë¿Ö®WßÛ–/Yà½øˆ›ÿÌ]nÑÔñnñ¬ŠÂ¾<îˤ3áëš¹jažˆáö¦4ÂF;gE‰dbÍ 0J&[Hd+‡Vnõ„:°!?b‘Â÷˜°ª-þ4aCXm Až {ÈÛpPjpN‹ Ð°J:þfðO+¥ã_)ëØ±c«§Sq?<Lää,0ñ&ñ±ˆÀQ±œÞEÜN lÈGQQ¬P,ùDP<©o”2¬cÀJîq?´†ÑÏEY¤ aåD›X‘7$lPX±ðAñ—ûÔ='ñpÖY´DoÚ~ûí=‘ÇuÚ õC<„º#ž XhòD[QVˆ",P³ˆ<›çjÿFbq¶‡0M÷ð³“$´yœŠëü'…åz£êWÆ,€ æÍ›ç­hiHH´é¾S¤õXÕ ò ®yÆMàj†~È62$´b„´Æ’J°òÔ?òAbŒç{QÂFocÓýD=Ê-JØ@:1FÐh‹§´sÊQ$å¿ù曫GÑë cþÍôûEòɸÁØ˸£ÛïqäMøzíLÒ,ói„Mô,®!`†€!еh>aó:½ õ'Fõ¸Šë=`Uu٢ٕ‰Ö\·°¢€-[4·k!×…sÛ(´K¬XPBµh%(´°!œ¶¢Ðñô÷Ðñ£žP×#l˜°³â®WŒI›¼bÃ$‹$$lP¬)ÏK“¤J «'óøVm±81Ç´ä‰ÕsñYÆ£<üQîPAORîHCü½4Ú‡ i—Ák¬*H#&X?q-$lÂSŽÂø„GqEB†kr`–ôlÂÖW´'$$™üÅÈ?Ný‚ÌÑ ¸ÓN;éK>ݰOÕP?„ð ·•ˆµPÌR@¢ë>%×ê}¦bÆmaCº£Gv{íµWj½„cñtßëD¦ÈxÖ§CÒ1tBŒÒÏihÒÀܼûî»Ýæ›oîÉÉ2„&G4é¾âIoü/JØHÛ&¥°|zË#$&dý”¶3fÌÿð˜2ÝôQéoäöoÜÇ9°–zíL‡-úÝ›¢ÈYK„ ¨§ÐQ†28Bœ`%BjAY‰|@”„„ a!|Ø&D\m‚+$iõ´ÏðÙÔÑäJ{¿ÿþû«dO¨òºD‘¦Ÿéz'Q†“Ú í'­b½#Aå9á§øåxòÉ'}¾ä¾8¡Žõ_ £•T¹VïS[tÔ +V>„ck1ZòöiÆêE¶õHZÔ Má8Ä}Ý×Ù®9©E+ÁIXÕ#tzY¾—Òú4Ï×i‹Œ[Z );mÒH¶ñeíßô1Ù> öŒA"Bb‰Ž9†OÁ•çË1ìú¾¶þºôÒKý-M%9–2þJ»_Qáø(ÏÓ}ÆÚ¯ôI  ñ'd­\¯×Î$\™Ï4Ìˤkq CÀ0 C 
óha#E0zk·ëçøŸ}‡ööŸ72InÛg ÈCØ´ ;ÕG 8pÔ)Šw›2‚R‡’‘­¤)(3$(–¤‡ÂNÔcé‡×BMò…’ÀŠ,iƒAAÑ‚L N–-YYÒlf˜28‚= Ê’8JB"FØHYx.„(¥yÛÄ€XiAñ¬,"ñ jòÔ+u É…R)åÌò¼žFð¥^QÚùËÚw:«FYÊñ€•–PÚsL¤_Ñv±¦*"õÈ‘"i–‰CŸÜIätHØÜsÏ=þ‘´5Æ[ú#„¿õË25aq CÀ0 C +m!l¶>m¶é¾jîû næ„YóláJ"Щ„MÉb­],¸%–Z°žà¤$MÙ×qÊ~ORÐʦۓã‹bÙª:ìÉX[Ù‹@;Ƭ ñû™ ¹Å–ˆ-ú¤)S¦xK6}?Ë÷,äH–tF[„&meM"l™KË0 CÀ0 ¬a“©n®§6ø`Sœ¨>¬0÷Ç2†I¹öÝ€U¶_5[Ú¡ 5»LíNß›v×€=¿(íô1Ø6Xç16B²`I(μ¹wã7z‹Ç¼eÔäHÒvؼi– ŸæóGÒ6ÂF°OCÀ0 CÀèÚBØ Ûl©Ûú oXÔÌ?ßÝÚÔNÀ£Gä¡§6¬ ãÄ•c_“…ë޵m…´KAkEÙÚõ #lÚ…¼=·,íØr‡o'Ùæ+[~ðõÃV¾"¢ÖÎ㠺ȳ²ÆÁgD=["Ù#lb¨Ø5CÀ0 CÀhm!l(ìž¿­u|aqcÒ|z a#Hâw€#Ä™ˆË1µøºÀÊäŠ#Êp;€ÄkÆ'¾#ÄÉ1Î9C ÍxfwO“íø>B±d›ƒ‰!ÐUh÷xÀ¶P,að‰Iƒ¥ ™!±±J,*Q£FòÖ;·ß~{Ñd'Èø¢ÁÍ„ ¢iããgï½÷ö÷ØF˱ñ&†€!`†€!`´ –6C·:Ê-ž>É…~lfŒ_P±²y¡]8ô¨çö4¦GU®Ö0 CÀ0 CÀ0 C [ ÐrÂfÍc~ìfÝõ+n‹M³²iM›2¦58ÛS CÀ0 CÀ0 CÀ0 ¢´œ°Yû”+Ý+WîóZÙpÑH›¢U™=ž6Ù±²†€!`†€!`†€!`†@;h a3ïÉݼ'nˆZÙ°5êé?Ì06Ml FØ4\KÚ0 CÀ0 CÀ0 CÀhm!lÈ7Û¢ðe³öϹqï±RQ m&]\ìtŠ•³ 5aS‡ý0 CÀ0 CÀ0 CÀ0:¶656 §Np¼{„‘6-lFØ´l{”!`†€!`†€!`†€!P¶6ä•­Q3nýÏviÃM¬m³¸ñ0”þg„Mi-CÀ0 CÀ0 CÀ0 C ©´•°¡dÏÿúðjÓH ´Â¿Íóq#€ø4¦hÅ0 CÀ0 CÀ0 CÀh!EØPnH$æ×ÆßPÿÌòF‘ã«69À² †€!`†€!`†€!`†@è8ÂF0Èbm#aù4òF£‘þÝ›t|ì®!`†€!`†€!`†€!Ðn:–°`òXÜHÈów#h¬üi„ÍʘØCÀ0 CÀ0 CÀ0 C “èxÂFƒ•‡¼1‹\íw#ljñ°_†€!`†€!`†€!`†@§!Ð¥ Þˆm¹Û ¬ëëÆ¬m4j+¾a³2&vÅ0 CÀ0 CÀ0 CÀè$º,a£A¬gyc¤F˹²„ͺë®ëFå†îèzõêUûûeta–/_î,XàfΜé^zé%7eÊ”.\˺!`†€!`†€!`tUºa£ÁOrVl¤Í(%l j6ÝtS7hР7³o†@7G`þüùîñÇ7⦛׳Ï0 CÀ0 CÀè4ºaÀFÚ¤7³"„ÍV[måÆçž=tš[¼É ×wÜ"×oõŠuMŸôç5úî='¼Òè$-=C ¬Ç† æÆŽ[½þôÓO»‰'VÛCÀ0 CÀ0 CÀ0š‰@·$l 7ãÞ5Âû¹ÑÞÚT7cü|}©Ç}ÏKØh²fúO¸A».n+fFØ´þ÷ðõ×_¿JÜiÓãªß l†€!`†€!`´ nKØ€(¤Í玮wÆøîþÓ^¨¹ÖÓ~ä!lصÝvÛyˆfð˜ë¿éÒ¶Ãe„MÛ« Çe@“6>ø mêq-À l†€!`†€!`´nMØgŒ´ééV6y›<Ðû¬éËéFØöÙJ„´Á§Í 7ÜÐÊGÛ³ CÀ0 CÀ0 C "Ðí êt‡sÇÔlêéV6Y ±®Y¶Ö<·àا;¦{aÓ1UÑã2‚µ¾mÌʦÇU½Ø0 CÀ0 CÀh9=‚°‰YÙÜxȤ–ƒÝ)ÌJØì´ÓNnôèÑ.¯u „؈m6­¸FØ4 ZK¸be3uêTwï½÷Ö m· CÀ0 CÀ0 CÀ(Ž@ l€'´²éÉÛ¢²6²jÖq]¿Q•Ó êHxtzÒi]u’©{Û›ºY€&!ÀéQÛn»­³mQMØ’5 CÀ0 CÀ0 *=†° ­lzò¶¨¬„Íá‡îzõêåæ}èáºGw YΜÎ5sÂ|7éâQBÕÖWð‹6³h A`Ÿ}öqË—/wW^yeCÒ³D CÀ0 CÀ0 CÀˆ!`„M •n~-+asÄGx$æZ!lêˆ6\³A5¤b¡uS5@Á/FØdÂmàÀÞ"DÇZ{íµÝóÏ?¯/Ù÷Œ@Ø W\qEƵÁÆçvÝuW·É&›ø—^z©{ì±Ç\ïÞ½ÝÇ?þq‡ï¨ /¼ÐMœ8±6b~xâ‰n•UVq·Ür‹{øáúý:Ë#<òHŸç—_~Ù]vÙeY¢tÛ0;¯[ 
ø‹_üÂ-]ºâT½¤º•ðÓ¦Msþ󟛎K‘¶gõ›½Z6ÜpC÷–·¼ÅGøÓŸþä¦OŸî¿'Õ¿NyÀ€n÷ÝwwÛl³9r¤?‰îW¿ú•²æškº~ô£nÙ²eî¼óΫ¦«ã7ã{¿~ýÜ?øAŸôwÜá}w5ã9]!Í£Ž:Êí¿ß~îºë¯wW]uU®,wewØaÇ8Å{ ËÒüànÁ‚¹ÊŸ7°Œ‹ÄÓãhÞtºZø²ý|ï½÷v[o½µ[¸p¡ûõ¯Ý%Š_vlÜ~ûíÝ»ßýn7iÒ$÷ÓŸþ´ee.Ú§í}Ú²*êvê1„ 5'dÂ’×–ùмõøÉþ³§ýkaÃ3D£.$NŒ°Ìá¾øºÁ*'«´Š°a¢Ð¿wë­·ºßýîwY³×pm´‘:t¨›={¶%%ú®w½ËpÀnÑ¢EîÃþp5ž“N:É 2Ä[I½øâ‹îôÓO÷÷¿ÿýï»#F8ìÓN;­§ì”‹>}úx"ãòË//›\ÓâgÅ6)e¬ÖN>ù䚤/¹ä÷—¿üÅsÌ1~âÁÍ9sæøú« XòÇÁ\U¾>÷¹Ï¹§ŸnŒ#ñßüæ7Ž­bLæ™8µZ˜8}æ3Ÿq2®ýøÇ?nXÙò–åóŸÿ|•°¡ÿQHRÝ¢œå+_ña¾ó︻îºËoÆ¿¢m¯ÝõÛ ,š•æñÇïÞþö·ûäÏ9眪Ÿ«¤ú—| 4È1ô#”®w¾óþç7¾ñ ·Ùf›ùï<ð€ûú׿.Ášú¹úê«»óÏ?ß?ãßÿþ·ûÑ~ÔÔçujâ(Ò?ÿùÏ}ö°®|ßûÞçßÍYóÛUq<ãŒ3 ±úhÙ$?/¶I(JØ`Ù‚åŒÈâÅ‹Ý /¼à.¾øbwÿý÷;Vp±€AMØ@jüþ÷¿w|ò,&†’vN@6ÝtS÷å/Ù·k)„$Öí$E#­ne’=wî\÷Þ÷¾·!ý1,{™¶×Îú ËÑé¿“›´ú§Lgžy¦÷%å{å•WÜäÉ“ã;A³ùæ›ûïî¿>Ñ„E•’„äZryÿý÷w2F÷»ß­’¦eÞ MðƒÝk¯½æh—Ì# ç›)Iãh3Ÿ™%mšO~ò“n¯½öªŸ1c†;å”Sª¿Ë|)ÛÏå]œ®Sæ˜ÖN?üáW‚§ìب æ¶7ÞxãJÏhÆ…¢c£½O›Q=#ÍCØôÒÛ ]¿¿ëÕ·—ë·Jo_»‹g/«8KÝÂW–¸Å¯[Ýô„joanâÈYH®ÅsB—¾.Ṅ4B鮄 [d°À4^¤aó› ©Ô»B*±úyÁøhXp`‹\{íµîŸÿügÍëŸø„7›½ï¾ûüŠ„Xòß{ÞóÇDYVK&×ðèE°MÊ„(y·Dì‘ &¸³Î:Ë—˜ìcslW»è¢‹¶e‰ô™dJ› }`¹Õ(i×rR™I´–N$lÒêvuÖ©Z.ÐW›aÍW¦íµ«~uv•ïI„MZýS¶ßþö·nذa¾˜øÀÜ«¯¾ZSdÙ*±dÉoöÞ¯ ÜÀE•’f!wR_üâÝÎ;ïìã}úÓŸvÏ>ûlî4bP,éG¬à_sÍ5± ‰×º:Ž,^å-s"nt"a3fÌ÷µ¯}Í[“ê"4’°)ÛÏ;‘°‘±±ë„NÐÐùïeÇÆí¶Û®º%J/F®ô _(Ú§í}ÚàŠèAÉu{¦Wçéï†WŽ™^c·!nø–]Ÿ+&øK,w3^à¦ýg®›ñÐ|7êb·|…ËnÝšEØš>.]“2¨Xâ„÷b[ª$NøÙ !<¤¬ø¾Àº'°Ù`ƒ ¼uq¾÷½ïU}ž|õ«_uë­·žß&õ¡}H’lêç7¿ùM<¾8¾ð…/4õYy/‚mÚ3Š6ùÈG'¯!çž{®ûïÿ›ö˜†ÝƒÐÀOí K,òÑHiõdÕUWõBL E˜4³ÍéDÂFò™ô ÙJ¹ô6˜¤°E®—i{­®ß"åë”8I„M½ü±%’~ŠC«Æìzyâ~Q¥$KÚÍ Ó,¦L~»"Žøa®È" жøã*ƒCÖ¸Fذô¤ÊWY˜5k–¯ùÝHÂ&+>Iáº"a“T¹ÞÝÆF{ŸJÍÚg^º5aÓ«bH3¤bU³ÎÃÜÚ‡¬’ŠÍó×ÌqÏ]1ËÍ}fQuËTj„.|³›Ðš&´ºIƒ·„ÍèÑ£½ÕÉÌ™3UvK+<‡m/iÛŽPø`Õ1ñÈJ¡Eø‘ò ð†'¡ðazŠ@é™R§r‚Äøñã}8¶kõª(œ–Âü¶·½­êެ&ãÈ‘I©ÌÙÅ œ9…üR.í«GÇIú^Û¤ô¸.¤KÖ-Q2ñMJ“}ë8e†Ÿ„6I= ø¸èBhCœð¤…2Bò“"&×Rï„CqÛ%“<ú$à§>õ)·Ë.»Ô´#ú p‡›€HZòÆd a’«OCÂI1«—LüDÈûÕW_íþñ¸_þò—þòM7ÝT=B›ÇÜaMiÃ>ùf68–$z›"ùCáÀÉvè$8IÑHª[)3J¶Ô7õK=ÇäMoz“ƒˆÍJJ”m{ä!­~¹Ù†ß-Ê …>O=²½/èöˆoÆDG×hã´ | 9÷EhÇ'NpgŸý5¹TóI[ÃÙ:㙤)_h¯I>¸!€Sw—1œE¹eœ¼÷Þ{}Ò±úÇq©8‚—çëOò"ã m›6ŽÄœSS&pàD/Ú*Û/y×$M‚—=öØš>†×]wßN+Îv‹8æÔ+´3eÊ@BîÇÚ9m‰xãƒ(£þÂëÿ¨?úBþÉ/ï§$á>M"²M¢7{x3®„ÛFiÇ8ñg;ÛÖB)‹#óÒ ±"ÌEŠúåÃC=´f 
#ïø¤a,O¼9,¿iOY|£Œ5ªê+vD?RÒ‚Ha®¶Í¤q”¸´o,Še¼0Õýˆù-¿™?i¡?1GbLÁwÖ²¤uÏØ õv(Ú$lêõs”bʶsʉDûIs:\d¬Öã1ÛùM:zœáùôí“w=„RL¨o'ËŽl-çd¬¤ú,RfÉsÙ>-éèOý>e ›õÙm ›¾ƒ{»5öâ¶øÌ¹, >òýinÚsÝ’y˲ï’aÚMØš6`V²†ðy”OVf™ ¢ØñÒ‰ö#÷“ !7P®´Ï &3¢8J:ò©óÀ &$ ÂiP8±Ó{ij6’¿ÿüç?^a€'­˜pl4ŠÂdgµÕVó“WH,„6Á¤ž—0d^$!‹0(Ê6'Hý×ÿ1ˆr”-­¶ôŒ²Ê DÇã;˜09Ó/È.ÒAPŒÅ·¿™ä9Ùª(¶<+Iò6(öà$¢Øiǰ(fˆVì˜è0i¢-‹ˆé:¿cN÷Ä)Ÿž$I\>™èÑž©?VUÂÉ „å>“£ÌÓ}Je¦M YƒbÀÄW„/m'I8v\&2ZY$¯\¿òÊ+«Q›AØPN°HNCƒPIR4’êVâñ)Š}AÚ¿¾OÿÄÉ2˜ƒ¿›zR¶í‘~RýrrA Is-”£ð~ø[·GÈhH˜ð\”¦˜ÄüDbL”½X<&|L¢ÅÊD‡9óÌ/WÞ'+ˆk}]¾Cd£0!Ò¯ù«mùF˜Pôö(혓ü‹Ò(q3ù~BðAꇢÄð¿!E¥Íé> ^ƒ$æý¢»0 ÛBå½$÷x×@#Œu ¡ààÒ a¡Îk ¯“…„:Å ‰£Ñ!’„÷m€[!Îcý³,Ž[mµ•[“°bìE!¦Mç¶ñÈáIñèGúd‡báÉGÂe raaˆíÛIe‹µÍ¤q”4hëc+‹D–B„•¹ ¿ª¹ƒ*‹1¡@x3ßÉZÒá]Àx«I¼f6iýb¢«€˜P6ÆKHXÙŠŽÕ2¶&Ø‘~Lè3ôW9ò]ðI +ïþ²c£vÔ|ÜqÇÕ<®h™I¤lŸ®Éˆú!ïSæpœîJcB}¢¿ɬuž«ÌÓ>Y¹ KÌ÷è'±q* Ïï¢ã!q‹Ì‘ˆ)Í\U/„q] D( šÌeA‘ÅDÏu<ùž4ïhÖX+Ïmæg·%l†¬ÛßmðÞnÍ i“G^®5“~7ÃͲ(O´.¶q4 p8)ÖÇ׳a#i²€¢ÇÊNX9äê Á@ʪŸˆ"ÚÂ’…ÉÂÊ[LDXñ“Q^^0Lr!+:è ê)„ׄÄןõ”j&p#,4˜¬ðR'(9¬J^¢¼aÃu& G}4_=àÁÀÏD¢‹øXÚh²€°’WžÇꨈ6üæeÏŠ8–äí¢Œób!žLº4aC\âBñl=‰æ~^‘ü¦Y/¥¥™—°¡¿ñ‚bE‰• „Õ`Ê…0¡å¥›¸ø•zE™UÊ€yä‘~’ÄwH9!àøPLÀp£þB‘ ™\'/(„XÓ@Ž`Ñ"$÷hãÚdW& }BØàO†v#–AûÛßüDBžÁ ºn+Xeñ<”3ðEaÓŠx=e±Ñ„>!…¶{Ï=÷¸›o¾Ù“‡ûí·Ÿ7_[DA¾')iuK§'|šp¡½Ò®hëX]Òî„Èaœ9õÔSkˆdM0RGwß}·o´už)Ć<»aÃX…‚ÒA{b«d¸(+ÖiŠŽu3,á¨ÞXÑŽe‘âÎ;ïôc¿Xù‡•<—kXQGä”KGiãܯ× #¸#õÏäú¶Ûnó ï]Ú‡¬Š†DnQÂE’’BH3ÈlQx Ÿ¨7ÊÆ‚BY!À(;xѾÈ;òÐCÖXi%6eq„8¢Ô A䕱«)+•—^zÉÿ®÷O+œ¤Å<†:„\¥Äê’{Áœ>†Ð&i›Ô‹%ÌeÄ‚–¾”e»¦&l$Ÿ3”‹¹}w€”ù†n¨›NøØ8J[dCê‡qkòpŸú"Žw í™-æÔ»Ì} O¼,äac"ãq#-lÒú¹Xºæs”± Ë ‡¹–­ÌXŽÇzͳñH{AôC=¡ŸQ!m!Ðh¿,v"Œ½¼ÃØ»1ÏØ˜DØ”)sÙ>í –ðOÞ§r[¿‡Âº¤ ³K@æÜ—úe¬Òï6ÒÓí'‰°çÊgÑñ°è‰6Á¼AbY„enŘ'¤$Û›x¿‰h†kàÀ|˜øÔ÷~ûí[³ Â;HûŠlÖX+ùkög·%l†m6Ðm}ÆšnÀȾ¹0\8}‰›ðõ—ݬÇäŠ×•ËKK&ƒIy—•™y§>œd¥ëõœKma: –0IŸE VGèüB þØÊ䟗/{^¼Z ”a&öÛaPBµÀ Â@ÁQ  9šµ,aƒòÙij0ñÕeBQ Ž“|Ø$6äU¶Y‘.Ö ž 視ÚDÓ_¬üCÑdâ…Ãjˆ&l.¬˜p† «¤2±F¹— £&lh£„#?©§V6’ïz~Db‰‹âž¢|AÄ1æÎ=^Ülwâå¯%F¶4èÉ”£'dàÌ$Xoy#¬¶JWÅeÁ³!l §Îƒ¬y}•óü£W¨õ3Ï?ÿüê3Tا­Šø]OYl4aÃ6>™„†+ÐäG?Oh1EƒðiuË}„:e†ˆÕ€ÿñú?=1¥?eÝ’Gô2m/¬_É“6O'V„aò%ÖpLõÆqÊfZØÊ  1Òq¯¨›° ·-‘/ ‚hˆ&Wcã(Û™„à„åßG®üÃ⊹ë \×uE»ë*„ d0í‰]´[¬oÄñ~X¶2cµîËôm}M~¨ê‰m¬çæ޻±ÞبߋҞÉK™2ëvÒè±Q¿¿¨KÆ 
Y¼#ßÔ%sjÙÉÖpÙV|â‰'úwábsÙEý6‹O¶¢ã¡_òÌ‘´UåæE~EXäC÷«]ý¾ ›˜OFÞ,d"¡•Q³ÆZÉ{³?»-a“×jC­É}½»|o7a£­kÀ´U„ DE¸ƒçCâl³Í6nYeð|…üÑ„ ¾8„\`5WoÇð(¡ýû÷÷>YÄBîñ ©Á@Œ”%lPî°z`@Ò 4i—!l`¶ !P² ˜P±ê‡’ ~ñƒÃê–(G„Â&‰8+0C° ‘­%š°aë«r¡ˆåFx]~‡Ä…\ç³+6ä›IXS¬¤Ó†dU?\I <¤â ÁŸ“…Pô„ «0Ú}(àž’+–d„“ ˜3ùÀB@ˆ¥Ø¤‚8¢ü†/T…H=eQ(ú/iåùdÒ+@ºŠ¦e ý²B¶EÆ âÕ›”²Ë$ô·á/VþAD \Éê¤\¯÷Ù ÂFO”QþdPçE&ƒLÈ´R®Ã„ßu{D)C9Ó¢}œÄ¶†AdCb#LöÅ ’ímb†r «´sÆ?ÝÎY™“#ccÊ"éÐ7ˆ+cS+Mä dÄÆJ]nÞ·ˆL¸…hõƒgŸ}vÕ²^ÔQeæüŠçwWVàc¢}2ð>ßgÍ<»¥Ÿë­È±|ʵ$¦,Ž’.Ö>Œc1ô¦mƈ•Xx=†1ç ™0,d^ÒšF6XèÐïBkòÂÜE¬Ãx/Ë‚P8޲X&ó/æ"Ôa(ldk6#¨¸ŽÞôÕÔà~‘wXÖ:É’¶îÏú=™…¯$6æq9ªø| ˦Û|Þ±ZÇ:O­Ë~—ùש/ü­„¢Ú´å43§¢ý†Û¢d;TV \ž«qÊ:êñ%ï‰9,LïiúO(¸±BVû/Ô„M’Å8i%Y+7c¬ óÞÌßÝ—°Ùz ÛþÜ1•†>ˆ¾N{Á͘`6´°á8o²Ã"Eˆµ¢6¼Ð™<„B9e_$“E„Á‹‰» ˆ‰PPPÅ”]“a8&ýLèË6bÕÂV Q$äYeÒÀ\ù¤ŠY¹zHVB7ðc‚¾:Œ6LÂde;Œ#¹4a£ëCâjŸ:r-üÄÚ$Iº*aCy8ò—íuZh“ÔyLØ"‰‚ð²ÂL6=!‹)È^&ô!ÙÎ=¹Îd ¥]ü(%MFôêk¸í@žÅç¾ë.ÚROYL#l°Ö©wT2«hâ_G›ú2q»ì²Ë|²ü  qš—…°Áo ÎA&Q(&’f6L)"Ø…ùE+”ÆaÆ‘ö*N[0>„[4 ßEAÕ[ñ´å c«Á1‘Éc$¢·—ÄVõ$ú!§} ­ lPdeË J½XÏH~ø¤¿Ñ¦Ùz¢}bM‡UJLX±FÁCêõA‰RŒâ€Ä,+$œöჵí i6a£•bž1ÇVh,@y/'‰Lö5É\G”,ê ¡îä{˜V™‹$õ‰0ü›ßüf¿rÍuÒd‘)&šü„ ÇjS¤Q„ V²s1Áw~:|ÄĈo”OÙªˆ¯Xs„éɸÁumI†!+–a|ùÝJ†EÙNÍø‡õWL¨kÚTX¶2cµÆ5‰ì‹=fJþïpÞ ÷뽋ZØ-sÙ>-åJú”ù÷“,¹'¤;ô|Vœ«SÇz[”×Ð ÐC²ˆŽGø,ãa™9’{ÌåÝæBŠyŸÚzW6øÝbŒŠ‰v! 
ãZ³ÆÚØó›u­Û6«n2ÀmuÚšŽ“¢ò'DM<÷e7û‰…y¢u©°­²°ŒA4A•ÇÙ°Ä-BØð²`’Ø\™4@ ÈÉ/1†ƒAøÂduƒÂÊ-/™˜ðbC¡-CØP¢ ³B¦÷hòLîÝ%yæ¥ &™(=IÛ/˜H0™buP0”4„°IÃÌÀŽ— ˆ&lP€µ¹(÷9‚•ëi¢_pa¸®LØPÚ¥˜>óÒã¥[½$,&ß¼Ô‘˜Ù*×õ„,¶‡0HÒʈž€¬¹â?&{‹CÑÊ„6÷ ÃéU¦zÊbaÃ)OiíçâÂӴMº ³Â•Ut\HÏ<„>R˜•¥FI£ =¡ÊšGMž¤Å‘ö*G62“{|&6ÒobJ…ޝ·¦Aô1þhk/ÚIÒ)R¼cd•¿„K:ÿißÅÁ+–\²ý—^Y™ãê­õú ÄÕþ©ÒÒæ=( º&šMØOˆiÞû¡0ŽB€Cʱ‚«%FØ”ÅQoÔÏJûN_ó†×DLZ{ÕccHD6аaQ ÿ+1ÁBQæ z¥^£:žvƯ¯ó]Á$b@Â˶ˆ¤ñEÂÕûl%a“uìâD—­ìX]o<'!bck;›2e.Û§ëµ™/Õk§IÖozþ)%ssÙE° FÈ*yÇCÝ?óÌ‘t½à[Läbù¼kÕʶzMìhÂ&¶-YÒÑV¥²PÙ¬±VžÙŠÏnKØ ÝÏ{Çp7úÍ+V³‚9õú9îéKfºùS»ïÑÞ­"lêa^„¬!Í"„ ñXÁ–}òüaU[ÌMÙþ€ãGD6L&dû ä“]F-žÉ|lˆÁŠÁšÏ2„ ƒ{4Q^øµ”%lpFȤP„“5F 0ÑÇ?Âý÷Ýç~òº³L‰'±Ø= äæ›U4ÙºS°a¥‡vIB‡G…ë°]°ÑJuY©ºœ|×+!샆 E&d\O#l´é¬6e• H˜.íÅ!ܾCÝñL$Í:HOœê)‹i„ Ûˆd‹L˜Gù…Xè­IiGWê‰L^†ðbÕ¨•}ço;×@IDAT~Ñï&lØg¯-’¶ 26°”û(8YN¶’ö¨•]îv6iTÚ[²]‡eW‘õ*¨Þ– û^R¬:¹Ïv.ò¥ûV©Â6Q¶ëõAžƒd%l’V²[AØOÆÞû”1´¤¥í±Oüª¾ a“„£®ž‘T‡Ôó ÿ³`”‡°Ik¯š° ­ EØ„>Ï(§H^†x±íŒBFûÃá·é3Iã‹›ö½ ”X,êtÙÊŽÕõÆc0ì;…°)SfÝ'=6‚•´¿¢„ úG¦3fɶ(æÐ²*Íj™ç'Ižñ°è©‘„ 落è÷¥6º^‰Ó¨±6öüf]ë¶„MŸþ½Üj;v[~a ×g`ïLø-]°Ì=üíiîÕûæ¹¥‹²³“™ï @í$lÄê&Ï©P!tE &ª˜h‡‚2IÁKNï!"åSœ‚’éi¤ÁI1ЦÓZ°vÀ´)CØÈIUÚ*E?‡|µ°aàg•E‹ô™P!LôEÉ–gQV!¦ nPzµaƒ£º˜É=/0Fð½ «­õýŒ"ß»2a£‰ );äí-&¼„i/ˆÞv ÃÊ„Œki…L4ÂI™\'>mïÀô'Ìð›—"Jl¸UCÌœcN ‰‡`qÅi&H=eQã¢Z9ç?½Lû@É’LÂF——ïiÛ3²äE‡i4aCÚ²¢Ç¡°ÎSÒwiZéÐa‹6ú„¨2[¢Ò¶˜ˆÏòK_€DšEذõ ‹"?!I“P@ýÓdIÚ¶¥vo‰Jª'mí’Ìõœ+ª_±åÝÌx#šõÊ.c„MYuü‡z¨r*ÕÙÕ<•ù¢‰ú´öª¯ÐZ­Q„MÚ–(ÞõâƒO¿Ÿô8Š¥!mç¤ã$‹hZØúËØ$ùtcβ‹â˜4¾è4Ó¾·’°a1«Ì–¨2cu½ñŒ:°!OEˬûd£ÇFò¥çKY¶D±w’rQ@ò.£ cAÍ"i!iu>@†õÆÃ2s$©—p|ÕÙÒÄN‘-Qšô—qM×k#ÇZïfï¶„ À \£¯}Ð*Þ©m ']<ÃM½nŽ[0­Öb!KÜ®¦Õ„ $͸wðØf`i¨Š61ç·`ñŠ¿ˆÞ•9‹LȤ6r¬7ƒNe;J¸ÕG2¼Ä&OžìãwD{°/C؈ߗ$GÀeNÂ/&¾¬TÉñŸì‹Å¼YDûþA‰ -p„°!<Ûcî«XáhA)…P@ôÖ.#lœß–Çi%Z°Láí}Æ(Êr¢ m5æ<“ö->—8%gŒ¡È„ŒëIމYá§Ãô#™<G& ò&Ô'„¢_¸þBåù•~šâFŸ"ÃïV6zÕWoÕ Z8…2°ÍD,G´¢Á$K,Ÿê)줫͒ÓVˆu²~oaÃ8#c…ÞKŸ5OIá¤=&)TE m&´Jas:¬ûQ’ÓaÊ#J=ß[AØhB…÷ ¾X²ŠøHHs \Ôé°øÒHK;Éé0Í!}žÏä:=~¤6¼ÃÂï´ôoÆ®ßW,å„;Úšl–ºÕ>lˆ[GÁ*LWç+ïwÆ%0B:Ùé0Û¨ñcƒh¿]±qTÚ¼kØ®(ã+ñµeíî”ÊöÄp¡@4¾Vi%aÃû@¬Àõ6Ï4§ÃeÆêzã1yÈBØ„ =’÷zïFYÜI:AO[þê9v™2—íÓR¶Ø§Ì—¸—äçOo×[%=æG”¡=X!*×Yw]ó’FIÒxXfޤ5´¥¶Îs§ÃèPŒ 1AGâ˜û°7c¬=¿Y׺5ahƒ+>lFí3Ô­{Ì0×o•¸¥Íâ9ËÜ”Ëg¹—n}ÍÍ«ø°éîÒ,ÂFNׂ ¿5|/cM«‹¢„ 
i1èó¢Å*a—]vñN¯äDLüQXE˜ð°"#„ ×ñë‚ÒŒ™?‚YžœlÀo,qä4~sòdÂàÁK‹¸E í+‡?6AçEEÇ*ò‘t¬7&ƒ²u„U-VQøù„a€”U\˜|Žt„8ˆíEÕ„ åæÅ‹cG0dÄ ÂуLÜ„ð1Âfe†'J&/JÚxqRÛ£¸¾L`CûÚ4ò+±PdBÆuÒÆä>täŠÕŠ8~ ·1É„v!§ðOÙ.Gºø²§ÜüÖ'ùð›£q’G[Úÿý½Â&}Œû­$lxžLØøÎ8’aúxMN·Áï S4¸^oRJ˜¤=ëÜCðùrŠÅãu•UšAØh b¤yc< Vº²X I{¤|±cw‹6á±Þáqªä—±]HÐð8WQÐ;…Fû¹!L+ú$}A‰¥L¡e'Û^¤_òŽ´âœÅ{A‹vúÍõz}PÇ•:äï(:-š¥žiײ]Y›®ÇüŸàDœ…‘°Ñþ[b~“G9¡±’q”±S‹ø‚àš&]“›²8걕E ò§…w/cç°aÃü»R[ëpáw=†ÅÈÓM®2 ßõÞE ›2eÖí¬Ñc£Ì—À k|Æl!…¹Æœº’I~þØâŽî‚/˜Ñ£Gû~$NåI'«õø’gŽÄ»ˆÅ¤ì±Þ1ëA]ïáÉ{Ík³b]6\·'lhÀê}ÝбýÝêoì†m>À ¨XÞ _^âf=¶Ð½òßyîµÉ‹ÜÂWº·e/tå_39ª[Ñ-Î×l nÈg†—LÔbÂ*^h}#lˆ‹Ó[&ç¢(³Š'ŽqlùͤŸ „d/dH—¢„ “oYEÀÊIz(E”A^º(É0¼ V®Äa3«•0äˆøŸ‰aÇ}!l˜ÐÈD‚ëZ ´Î¨lÙ¿AÜ3ÂfeÂF+Ür2˜žtÓ¦b+òbeµ§m¡È„L®ÓV™äB®¡bþ*dMl‚!MØ–VÊø:Ò…` O¼"œˆ>¤ž²ØÈ-Q<ÒHN6¡ 8‚;´ÏwÞ¹ªhP7âS"IѨ7)噢$m1¢Î9^I²„ò7#ÿtûÁ‚AOö ž–¿¤ú V²ˆ‹0q„è…ÀÆÂñ•- ¬ú!Ieò7ƒÒ“„¢„ ÑVL´W¶bBB‹l;ªB٤Цë¤G¿ƒ0%<'R&Qò¹6ºž¨Çk¯½ÖM˜0Á[ê@ ý¨be(í—TN‚£ó^† a! $¦,Ž´=°’2ñ>…äbü#m,+¨K„¾Å–Þ,¢ýÓP÷´WNñÇwÜÑ'ýÐ_7IØ‹dŒ¼§x¾nä %Y$i¥m0/cìABÿ8«­¶š7ʶ6IO>)+Êqlõ]Âdý”ñ:FØ@àËÑÅXõd=ñ/‰°!Oâ,™ïŒ]ôUÆ æ£X¤a•!B9…ŒãZ™±ºÞxLúi„ Ûæiãód¬QYgþiïâ%lÊ”¹lŸ&ßI"ïS¹Ï˜}s¥ýÇê’¹8cvlž›?i‹@I¿ÞgÑñ°è‰zaqJˆU ãyG0¯"]¹'>z¤ Úé0×hçXîÓx×ðÞ’S¹Ï"¸Š4k¬•ô›ýÙ#@ìÝ·—0²8ª_ ¦ ^ªE;}©[¶$ûjeM]ðG³ŽîÖÖ4ü¾¿rDzQçÂIÐ!lpÖÇK"BÊOútxLÜa^ù®–›I"¬´ðb=úè£ý%^Øì…A£6ò/žaÄ‹ ~=ÂSðu_7qd5ED,x’¬ZW°á’F’.ñyÁ†§ €Ýzë­Çm‡c»Ék 0C.¬˜d¢Ü‡"„Íßÿþwo¡ÃD[&ð„ ‘Ðä]Dà* F˜~ÑßIØfMÉ&"Þù³Æ+ª4£’gÌP*d5škz ¥"<º›¸¢Lò ÍÄõ„ ¬ÖÄ„þAZX”h‘ HHØFoC!~脘þž¢¬‡pLÄé” ©§,6š°á™9)ä(¿c'IŠF½I©6N: Eò!§ýÄò»V´í‘VZýÖSˆÏ¸‡Õ–>2˜ëI¢Û£V:$|†4ô*›¤©?É/ãct(ÚgLxßøqÑ ‹Aá}ÊF{ f¹^ï³èxHºEæHÄcìfŒâ•k¡0Ƴ(-–÷Üׄ ‹Çz7Cb~J³ÆÚð9ÍøÝco#Vu‹f®0í?¼âeûŠÙr«Ç| ar–&rbɼSN æïŬiÄê¦,l l`¤üq0©FÁ„xà^#_&Í 00¾¡@þœ_9‰-R¬êŠ2†Kû-V-°Ñ¡ {Z¼F߃|áè@“N° E6r:$j¶P°ÚÚ¥(a£•Ú¢Xe ze™U©_1™¤é}þ ¬8f¢ÆKMOêYu-k±É¹ža®Ê6Våµò@ýb:Ë i(L˜<$ù«@i“U˜¥/mN`Ã*K+VÞ˜È0ñK"A”Ò$Áá'§!lkôh„`ÁÄ8¨'`,dc¸%QoÇxÏ{Þã·0¤º•òÈ#Þ²ŠU´¬"+²Ä)TœöÇ–J„ -c‰Mp² 3-(>§ŸþÅÊV­mkÚ8a˜ôÒÎBÂZâÓ' ·!#tÿ ½2žÓ˜Ä"º¯Ö«QJ’|&ik‰Xû¦Lô3ÞA:_äƒ2ñÞ‰qî!X¤@¬ŠÅ ×ÀžRØR$‹¡áê Û› „\‘ð(#ô´>MÝbí¢­%Pô8 …:Ë¡°áþŒƒ2×áš6|ÇÂk+ڬƋ|1ALëm Ä‘cf“¥—ÅË£“O>¹êèŸg"ä‰ñö¥Çõwëÿg‹ýEãH,Æk¬‹b'.q_¶hòLiÓ\Ï"ŒåR·¼3 
¾°ìK!Iƒ­RŒwa¹’ÆQ‰§*ÓVßÂ1†ºeÁŒróNÅ·Þø"Ϩ÷‰óbúL¬=HŽ‘9iéÖëçÌèçlïÔÂ\Šq±‘wiÒØYd¬Î‚—à´Uúc(¼ß˜3Jý3G¦¿ eÇFÚ[`)RfÉÙ>-éèOý>=µÒn!j…—plåa¤>¥NîéO!U¹[¬ÓaÓ¾%½¼s$‰Gþ|eÞ ÖŽr¶‹ë ¶jUÜׄÔ; çš|§ b±ƒõo’4k¬Mz^£®÷8Â&$néÅk¶D!÷ä­kô©ô²ß-ja#„Mö'å ÉÀ%[<0ÅÄ—Á F©d"ƒ`‚œÇ"qð  ÂþÕ)äo¶àÛ¾˜ ¸j ýèa£ïwÕï6Ô«^Õëä²ð2““1b«™š°™­}š´9, ØîÒHa‚Å (–BSÒOô3ôä5ÜN¥Ãµâ;c&}Åe^[85âù2ñÒ+±t!› £WbáÚq‰7[a i3(H±zmGÞbϤ_`51È8yNcñ¸†ˆ2HY‰)Ò B™ÆŽë~£,² %K[á=XסøR¦FÖ„ mÜKò´ ”TÊD¼˜ÕSîôü<°(ÃA1¡_óÇØ*ý±ði×#ã"X!ä)$ŽÒžŸvOÆ0òHý† iqóÞOM؈ã{ÊÆã(JŸú¢b‰ v1âöAÖhÑ–y­Ò²æ…²2þÐGÁ‘…<Ò®±šùn Èo#ë>KÙ‹–¹}º^þÙBÇ'm7kß¼ðõÅ<ÚúG–±¾^^ŠŽ‡2¾ä#1F3¾ciÄXÇØ”Tް‘…-Èþ .±äÍ:?kÖX[ã¢÷°1Â&±í°/–Î4븉®ß¨^‰áä†XÔÈïf8&íN%llY•¶—AE€=ãzõ’É*&Ùy2ˆA$!ív –´ VÐYÙ‰Iw$lP‚’üIt‡²Z C g Ð.†-7_®œ:É)IÂ|…0œTÔ 9ëõSÛbÛ›ñ;ǿøÅ/ÜÒ¥KëG²À‘GéûËË/¿ì.»ì²ŽÎkZævØaGû¤ïÏŸ?ßýà?p ,HZ]öí·ßÞ½ûÝïv“&Mr?ýéOÓŠÓ÷¬¿;÷ö·¿Ý9ÒMž<Ù]uÕUÕzYsÍ5ÝG?úQ·lÙ2wÞyç¹éÓ§WïÙCÀ0 CÀ0º#=ްÙý‚uÝâYË|]öÖÛÝyò”îX¯©eZk­µüý_|15œ¾yøá‡»“O>Y_r—\r‰ûË_þâŽ9æ¯qsΜ9Nª g?Ê!ðùϾJØ€-›tm~ó›ß¸áÇ{’2¡+Êgœá C´|ðƒôJtÚ˜Ðê²ÿápôÙ„ð¼þúëu–;î{§÷÷vÚɽóït½zõrO?ý´ûñÜp ÿøÇ?º¸W^yÅ}èCª¦ÿo|Ãm¶Ùfþ÷<à¾þõ¯WïÙCÀ0 CÀ0º#=‚°ab¹|ùr_#¶TS3ÆÏ÷¿u˜šÝðG^Âf•UVñ–3ÅâÅ‹Ý /¼à.¾øbwÿý÷»£Ž:Êxâ‰þ¶6‚Rã>;]k\I{NJ­&-BdÏ<óË®OŸ¾nüøñî¯ýkx»îïƒ>ØAΈ¼öÚk^¹†ÄÁÒ&mLhuÙ5aó³ŸýÌÝxã’íŽüìÔþÎ;ò“Ÿü¤Ûk¯½ª¸Í˜1ÃrÊ)Õßú’DØ@Ðl¾ùæþ1¼{ pº‚ì¿ÿþnŸ}öñYýîw¿k¤{W¨4Ë£!`†€!Ð!ôÂÆ¹köuFôqƒ×îçú¯Ö×ÿèÕ%nÞó‹ÝÂKÝ——TH©•&g#/aÃDóŸø„ÏÕ„ ÜYgU“C¶D}ìcsk¯½¶»è¢‹ÜÃ?\sß~”C S¸r¥êÙ±[MZ„h IóÌ3ϸÏ|æ3áíº¿¿øÅ/ºwÞÙ‡ûõ¯í®¹æšš8icB«Ë¾ÝvÛU·DAØtºtb3fŒûÚ×¾æ­Â4~­&ldKÔ’%Küö¶W_}Ug§c¿ëþòéOÚ=ûì³›W˘!`†€!`tÝž°é;¸·¼n?·æCÜš{ qƒÖêWSó_\ì^¾m®{ù޹nÞ”ÅnɼÛ¥ju³y ›|ä#îÀô(œ{î¹î¿ÿýo7C¤³‹Ó‰ \g#Öù¹k5i"R–°Á ã–‹'œpB.¿Jí.{ˆE§ýî´þÎvX¶bbaƒÌš5Ë­ºêªþw« ›N««¬ù1Â&+RÎ0 CÀ0Bº5aÓg`/·Ê†ÜØ·w#w–½æ÷ô{ç¹Éšéæüo¡[º {›Úä%l°®snÈ¥v5ÁŒ¾_¿~îÑGuléÊ"(%cÇŽuƒö+¢S§N­n­Ë¿^V­IÿùçŸ÷é˶½0^#¸ÕV[Ím°Á~» Ž¢é¸œ¶Øb 7eÊ_–0ÿI¿Y-§ü(}O>ùdR°•®7¢^²b>œgo´ÑF^Yʼn-y/"1Ò¢OŸ>Þ¡7[Á£hÚYòS–°ÁÌk¬áûbóH¬ì±øÂ:–vÚ5,ù£ŸÌž=;-h®{ÒO <žzê©Äq¨ý1kà 7ô~`yä7oÞ¼\yÕûÛߺaÆùKwÝu—cKçi¯e Æ%ú[iüqïL˜%m‰ÒùÊò}½õÖó±Á ÖŸFåp¦ÏóéÏlçË"X­³Î:þïÿûŸ{饗R£å%lðßÃØ¸úê«;|Ía‘“õ½%©Wv gŸ†€!`†€!ÐÙtk²fÜ;‡»5v’©¦Ý9×=ýǤM¦]4PVÂF”²¤bâOÇ(¿ÿýï}°;î¸Ã}ÿûß÷ßñq3hÐ ŸA?ùÉOÜ-·ÜR“J3+Y¹ÅÄÅOH 
”}÷Ý×ÝsÏ=¹”¦o}ë[nã7ö„Ä{ßû^Ço”/Y&N_ýêWýD¸&C¯ÿØ}÷ÝÝûßÿþ•ÌÿÉãí·ßî(KÁámo{›ræ™gz ǼëÛwÅÖ¶EQ )#¾>Pà¿ýíoËí•>Q<¿ð…/¬t]_çêkò:|ë[ßê& ÜÔe9\n4ÖláÜzë­ýØ"ù#aÛ8ôÐCuu¿DgŒÊcU [¯®»î:wï½÷úSô âDÀ mF¡"^¦¿3¶Óÿ!´Ð¿©{üÍà´Wוûa3dȃ´ó?ÿùÏ¥NÂâ´'!‚ä¹´gÞ¼{bN‡;ì0÷¾÷½Ïgü’±"Äœòá7’¹úê«kÆÖw½ë]îˆ#ލÞ÷*ÿ ÏÁé¹çž“K5Ÿ¼—è#ä_¿S¿!mx'jò†SÓ N’„Óñ'»곟ýlu<—딇±ýœsÎYÉÿM–²ãt< /¼Ð]qÅ’tÍç~ô£jÛáÈdb†€!`íE Û6ý‡÷qkí7ÔmüÁ‘¹~ò—ÓÝ‹7¿æÍì¾G'g%lPžYM&(#Ú)1h&ÒÊ(%( .¬&'Û*¸:G¡LTQh³žhÅê/+¦Ä›6mšÃŠ#&/ä‡#ˆµpêÍÿýßÿÕLÆõ}¾£¼±5,« {ì±>8þ}BÅ\§sùå—{gÎúZ’‡Â@yÇVVb‘É•#p Ë‘·Š „V,ü×ÿ±â‹‹"ø©O}ªF©#œ1´'HÀ˜H˜‡zÈ}öÙnÓM7ußüæ7cAý5ÚÊbšd%l’ÆÒ–²C†„„M3°ÖŽj;î¸jñtÛà´#Æ$ôÉZ¿(Ýrr$–mIm‡ç~îsŸ«ylÑþ9þ«_þÒ ªè1¡-1&2þÓf5y /ר£þóŸ5lÂR& H"È–p\ÒάIC$9eÄúK‹<½ë0òw‡?üᕈîËûE†Ÿ` áÃ{Üga"I ½…âÔ8ÞƒšÈã1fC¬k’/KÙo¸á†êBÊsKÈOVÆ”Px.ï]ê…:€°11 CÀ0 ö#Ðm ›!ë÷w›TÈšÕvXaá‘êWïŸïž¨6sŸY”5J— —•°!“H,_X¡FX1d Œ0qLSÎN;í4÷¦7½É‡ÇR€eäÈ#ô„ ßÙzI"Ñ OÿøÇ?þáËý´ÏpB Ñså•WúÕl”Á÷¼ç=ÕUÕ;ï¼Ó[§Hz(l1¬¢"(÷Ýv›WröÞ{o·Ç{TW߯ºê*ÇÊs ”%°¸ùæ›äÇ~ûíë¶Ýv»jR¡Ÿ ˜ǤbL”L¶Ð@Ž‘62räHï”SV˜!NxXï¹çžnŸJy´b*FÕÌD¾ì·ß~žDãV1(Nl5ùë;ÞñŽjžzèÁ Q°¢¾ «O"ŸXO‘'VÙIs“M6©*qZ‘)S/e±‡”Bù« ,ʰ#ÿX€í°ÃÍ[H±Z®W×ý„BZÈmÈ‹ÿûß^¥²Îv„˜8‘I“BÆä!lhk`ŒHÿÂò‚Uu®üN¬thSXPos_ùÊW|ÆÆ$mL²‡„M³°ÎBØgu,î¾ûnOFž|òÉÕ:,¬²ˆV %<Ä íåœí¥X‹I›B™þùÏ.A=銕rÒI'yâ ^ç>¸¨´™[o½Õýç?ÿñ1ýKÆ}Ò¥ýf%lJQÂÆ“J•…±¼¤½ÝtÓM~ü`|MX„ãRÂFòŠ!ÖelAc dglA WÀÒ‡:c|âá¹#B~sñ•E„¾HÅâž1UˆúldÁ€EêËO„z«¶Q”™6 GàÂØIÈû“±F¬$Ù…ãb‘X{‹•]úÏcœÖ¤iilc¤¹<Ï> CÀ0 C µt[ÂføÝ6grýVé“ ÑÅs–ºñg½äf>Rky‘+‘,÷¬V+õ|ؤ)g\p_1,P^˜½ÆzàOúS•,cËÃõ×__“<[Øò€hr¯L½”Åò HŒ¤ƒHAÑCtžý…”¢<Å©pÄ Â IË"„¤Í§Ä/zJ”l—ÔÊ©N?mL²‡„M³°ÎBØPÆ9MXQøR¡ä!8B:ܦNà]Œ% ¾(ÐEú;[t¾ô¥/ù´ ° ·$jk<åñ‰ÿŠ6^oyË[|jŒ¿ôò"áD>…4Ç%M*$YØi@…"cÏ„´„€Õ¢1Òc[̄ؤ¿Æ¬Þôv"—ôëù°Ñ–‡”™úÓ¸@p±õ’­MH‘²Ÿx≞”!þ…‘mQ’žKØ2~x†‰!`†€!`4nKØŒØfÛáÜÑ…Pºÿ´©nÆøZ¡PB©•„  Ü39F9™;w®'Dá­I2&ìøG?~üJЇ„‰}jå^OjuØßýîw~+P¨l I4¿â óÝKœ˜@‰/È'òWO4iÀ*¯‘Æ“-hL˜õêw¨Àa$«µ((¤¡ˆâAZ(áÖ/³¢-G:‡ŠQ˜žþ­àØ‘Î:¬|‡ ",ª²(—r_>!–XIf™zBÊÔKYìÅ–Dâ;Cò*Ÿâ 5V!-Hã—•m,ø/ …Õy±¼­Ð„p‰‘u’Ž„‘-QrO¹×I„M³°Öí5iK–(Œ¡œyæ—«Öo¡¿‘0¬üÖ„ Hô?!c$ ŸXI`™@Cø!Eú»&B~øÃzË@Ÿ˜ú‡²ÆB8ë1FËôµ(a£‰fÈHÝP´õe8.e!lÂ8’>ã ‹[–ØÒЧ´&Û8©G ±@ ’â|±¼ÁBF¤aÃûËR?C1ÉÛn»muû#VIXê º½%•pl…¨!á¶(½ªè˜À3L CÀ0 C ñt[ÂføVÝößíz÷[±‚™ºe‹—»¾4ÕÍœh6‚Y 
IÇ·tüôŸI“æš@9hÂç¶z‰$%d+¥â eb‰­ˆK\¶* P!lŸ`\O4i€U «¯1ÑÛÇðk%¢8&Óâ`˜­(@1‘•d¶F`½,‡¤MôøZiâV ((l/ a½Šg^vÙeaèï²õR{”ÈÂK¾‡_Æh Ãéßš°!>õ©G¬%°DÂ¥»6ÍÄ: aZDÎÚ‚ ¢⬞h+FHÞ˜ìµ×^ÞRƒ{øˆ‚²H×ãm>FÐò9ñ)O[%^(E ›‹.ºÈ[[2.3>Çâò Ç%=öhB^c³Ž$­8À*|§!db"Ĭ®;üDA˜ ¼ËÈW©GØÈ"B.-Œ›|êmQYÊ.y'ÈÔ¿Þ¥q ýÉI\û4 CÀ0 ö Ðm ›U7à6ÿônè¸þ¹}íéEîÑLs³Ÿ|Ã9n®º@àV[Ø$XX°‚‰01EQ­%Äá*ÖX=Ä„UJQ¬ðÃD¼žhÒ(1IZÅÕ œŽ§<ëë(TH½ÕR!BÅH§û®}Ÿú„ˆcÛ—v8¬ËUñ%ݲõR{|G@¢åÈM]BØ`ÁiEI‚… –6a{în„M3±ÎBØ$YËTñ=ÂiBHÖv«hü4qR]L´Å‡¶:Ó}EÇKêï„…¿^{’mga{ÒÏÉò½aÑ€Ój.X,%‰¤ŽKšXH"l´õ‰N_[4éëIßµ¦ôWýÎHŠ»žFØh"¦.WNcÄ÷˜&vt{K*»äéÍo~³ßnÅoä´(ÙEùh&†€!`†€!Ðt[ÂfШ¾nÝc†¹u– é)ÿ˜å¦\>ËÍiI®x])p»ÙòV(˜~Ï™3§¡Ð5›°ÑÇ!l˜ã¬2&ZÑf÷I i°bŽo•PDá -3t8M섊‘—ô'ÃOÎö,ÒÒ„_78ýDØzÅ,²Ë,’•°IªMØäÅ^o#¯¡Ÿ%É?þ%Pt8í ‚Ç›üN‡›‰u; ›´åò6´·¤þ.$4a´?~kâ¡„ >IJ®1!ãW8.é12/a£ÇQé¯ùNÆB «7¬iywÅ­SÈ¡K/½Ôײ-Jo‡ Å &öi†€!`íC [6LJzõunØf+›Ï¬á­Õ/Âó_\ìýþ47ë±…ny…¯é®«Lí l´CY©ŒI“&U·ɵ²ŸE ›²[oÒò­Iƒ´-Qœ¼³Ë.»ø¤’¶D±‚Ê–-œR"´QVþY¡×‚Y;<¹ÏŠ) J(ÚQi¨…aëýf›ƒqt,ŽŒõ*°ÞŽÀ*;8Y¤l½”Á^“@1?0YòŸFgîgÙ±‰µ‡ˆXØ$mßÓd@,￞–’üFq/&Œ±r"[ܲ:ì—´Òˆ…”3%®|jKœ¤-QIe—4ø¿L´¶E crÆgø»üûg†€!`@·$lÙ#û¸Õwâ6>e5×gPo¹ý\:™{òׯºWîžëN_Y¹Fê¢[MØp,3'1áÄß +—l°G/«ÓÚ¬P%lH_œÇ6ÓépìÄ&)ÛùçŸï•)­À¡´£¼ëS ˜ä³¬­•ØÞ…u SêQQZäxݲ„”:þ}ÅO….ŽE5f™Äé$l›Ã~=2õ¢ ›"ØË³Ã“»¤¼E?5a“¤}Šg†G8 ¡BÒ›$ ™´¼sOÒOŠŸ×†4›…u; Ê’m\Ûj«­ÜYgU}×èmLEú;ýœ1Uú4ãÕW_퉜åâôâ\¤]„vâ;Ö’’þ$ãV8.•!l(;dKß¾}ý»(v¬÷zë­ç}ÑÿpLL!¼3åT&y…Öuû Ç7M ÿío«:¸÷‰Wþ5êXï,„ ÏK'¶¦=Ú·Ãn¸¡z*äË> CÀ0 C ýtkÂFàí?¼Xñi3|‹nÕM¸kTöKUdá´%nö ݬG:¶C-šÙ½-kV6ú„©o¼Ñ±UÁ[Ž• WùQ‚9±I&ínù‘²„Ÿ¢Ü§)$âï¢AN‰"¶Ès'NœèýÍL›6Í“¬†3ÙGbÊ»¿ù’äí¾ûîóǾò¬}÷Ý·jnOôï}ï{5'˜$)pXÉ àƒ ýeÔsäŠOÊÞ¡bäLø÷£Ê±Á¬È"øA¸¤rêÎUYÁ† ‘m]!‰Äqèrb‡6AþÙ²óÎ;WXí¼·L½”Å~à 7ômrü€Ìc+>|XYG8–™ã™³ˆ.«ˆ›o¹Å×û[lá<ð@ïl˜ûœX#`I%‚"¹õÖ[ûŸ5Xga•´ÑF¹÷Tޤg»’HŒ°Ñäþ„®»î:7yòäÄS¾$-ùlaÓ,¬ÛMØ€Û?Q¦ëöÙgoY#cÉ-•z‡|)Úß9]‰ñ•þú¤àꫯ޶-QXôA,‰eßK/½äLjdÆÆB¶AŠ„ãRYÂF[Ê0þs¢ý™>Æ €‡zhuü§³…UD× }_dÔ}ŽwŸHl[„c…Ü¿öÚkÝ„ üx²lÙ2?â`\7Á…wã"c§Üß3>±Ê¿L8Vët°œÂ–m"E}t:é„交mŸ†€!`†€!Ð~za³Ó÷Ƹ¹Ï.r‹f­ ¦^7ÇÍ{á‰Oû«£59ÈKØ ˆ²Â‡°2‡r¯E; ½óÎ;ýdœûVödz5Ai þBåd`åZ^ÙîÃïÐ?‹ðO&Ôi ‰˜®3¹Ö6’$þ?ØfÂZ áYEQÎ#š4`›Û!Ž>úèêJ.i‘6Ö&XJ„¢Í鱞`ë¶; „ ”›úb…V ŠÜرc}\,*˜ø#bi‘82Öñ’¾³JÏiOcÆŒ©QB(ÇÔ©S=Q§·øèt°®ÂO…VLÈ·l•㘩—²ØK>p¦Œ ¶öi¡¼Óù(mWßOú.§Î`Ysj¥¾ 
„P¤µ°5Ží!rÒ–¾Çw,p Ù£A,mð‰Á3˜å$ÝÇ>ö1·ÓN;UIâáì:‹Èqã”.”¤1pºìï®´çPµô9ž£ÉBmõtr™Þ¦ó=æß¡ iÉvE±Ä’8X ’·°Ý4¢¿Ó/±ø€ô`k~YÆGÉ_Ú§l-‚Äú+¯@ÌIØŸ°(tgd|Ç¥Ã?Ü÷Cž‡_4!Fp°.Ö›iŽÝ%ŸŒïX˜„„'ýñÏ—]VõŸ%áå’š¾ŠÃvÝç¿x/ba'XKùÄÏ㑼¹® ~³•õó•>(Œ\CHÿ©§žrlãÒ„=÷ò–8ˆ®|× &ü61 CÀ0 ÎA G6\³A ò72©æwOù!Ƽ']´&ó¡oŽVæÂ¥˜I={ý™ˆÿ?{çwÙTþñ5W3fƸ “Ë0Èý.E%!Š©(·ˆèé¦$)%•nº E%·¢!„JRãNÈu܆Á0sÿŸïâ9=ïš½ÏÙûÜÞóž÷÷|>ï{ÎÞ{íuù­µ×ÙÏo=ϳxq.+)i@|„—sþPJP–½õQÙ2ÒôäKÝLï ñéÀ‹¥%¿AAåk”(ª•ãû°0¹ë®» ·¿L¿´{, WÚ›GJÕjwÞ5ÆÙÚk¯Ç»À¤ÄhÞ}(‘=¸Ø@äm?žw¿¹È0F  ºEÚ‰u;Û˜6û„ö@R3ÞQ¾Ó ÀÍÔ bÜ,¦;ŒÍ,9û¬³b@pæUÈþ,O&UÜ+!­»Þr¤uƒØ"† ãžàì)žWž9âÝ@–ðÛæEw°1cÆ„9sæäþ¾‘‚2–9ë·Vcc *ü¦A¢·:ÿ¢x(B@!PACج¶OEÑÚ{©*ÏV O9ò±êñ`ú2P›^é“<Ò íó[Ø¢œaã_ÈQXɶ±€ÕñzMúû^ÃPí)‡@aS.—r©qqa·1¬>PÀ?õ©O-B"øåþùÏÆç¿\)JÝ+xײ².¾½‚Ú!„€B`  0h ›Îy6Üö³¥ŸZZOSһݦ¥îÇÌú‹4°m’i:+ȬÔb%Ī-q$, ¸~´Ò§áîStaß§:TôaÀ~khH\ê¦V‚‚c¹…µ†óåÚQGµÈÖÒƒª“ac±ÂÚ}÷Ý£.²Fî±x–æ „HMB@!Е Â&u‡šräãáÙ[gue§´»R"lÚpßüû‹4à¥ü裮Æê[«WŽ írZÔ +n>×_Øw3&ª[{è/ÂG‚I¯¶Z_×_ßZ\Hsß}÷ùÓú>ð›Xs‰EDi‰B@!н ÂFîP} ›¾x´ûˆ•¶¥+»2•‰óÒlÝXEÝsÏ=[E«at%ØìŒÊÊ;1ˆmsùå—7[DWßߟØw50ª\ÛÀ’¢¹ä’KÂ5×\Ó¶²²2&h3[dCQHš©SwÞyW|ÞÓ µYyè\ï!€U%»6Λ7/î uá…Vƒ6÷^kÕ"! „€½ƒÀ  lRëšÁìÅÐaÓ;°Z"„€B@! „€½‰@Ï6©u Ý8Xw‡²!,ÂÆÐ§B@! „€B@îD § ›,²f°[×0 EØtçèZ ! „€B@! „€0z–°É"kóVÞÖá|аñhè»B@! „€B@îC ç›¥6VÝ{©°Ô†£A{0ï åÁaãÑÐw! „€B@! „€݇@O6YV5¹ÈCB6ÿCBß„€B@! „€B@t'ž°©eQä¸A·æÙ[gugôC­daÓ «H! „€B@! „€%„M=’ÆÚ¯˜5†DßO6}ñБB@! „€B@nC`À6EI–UMía&¦6>º*„€B@! „€B ¿èj¦ I"jŠ '6ÅpR*! „€B@! „€ý…@W6eˆHDqjŠ!6űRJ! „€B@! „€ý@×6µvz2€DÒ}аi 7Ý%„€B@! „€B Sta³é +„¥6•ÙvHš·ÍŠnOÚñ)¢Â'E؆J …€B@! „€B@ô ýJؼtïUáÙ«O Kn0*¬¶ÏÒ™dâÒ´~\4KØLš4)Lœ81,¹ä’aÔ¨QaÈ!­¯¤rmB`áÂ…áå—+ðŒaÚ´iaêÔ©m*IÙ ! „€B@! G ßÈš—î¹2Ì~ü¶°é ËWÈšÑ}Z!¢¦-=h”°¨Yk­µÂèÑ}ûª¥•SfB ÃÌš5+Ü}÷Ý"n:Œ»ŠB@! „€B 6ýFØL¿ä¨X³ßöHXuï¥úÔ²fÊ‘õ9§ƒÖ!Ða³þúë‡UW]5VbÚì1aú¨5Â챓ÃÂQ¡ÃZW¹9Í<¯©”Dä#€uØøñãÃäÉ“«‰xàpûí·sD@IDATWõE! „€B@!Пô acÖ5+ï:7¬¼ëœ>íg·§ûÏ~¶Ï9´²„'kî²q˜½Üæ­­PÉÜDØ”LÉk"°Ê*«T‰‘65¡ÒE! „€B@! :ˆ@¿6f]³åÏ^ìÓTYÖô£meÜ 6ÞxãX—ÛFmæŽ_³mõ*𱛢H)]Qáþûï'Ÿ|r¬Æ:묶ÝvÛEªô÷¿ÿ=ÜrË-‹œ× ! 
„€è.:NØŒXzrXóC/EFŒÆL®ÝÿáîB¥ÇkÓ*ÂÆ\¡ÌÝÉ»EeYÛ´Š´éaƒò0räÈpõÕW‡_üâË.»lØ|óÍúë®^|ñÅørI<•Ù³gw¬y;6¼öµ¯—ï½÷ÞX¿¼´Yç±ØzÇ;ÞÖZk­@^O<ñD¸æškÂ=÷Ü“•¼Ð9b-ñ7a„ðì³ÏÆP^D•N6ŸùÌgª„ÍþûïfΜÙhÕ»ú>ŸSN9%Zí¡xuŠ@xó›ß>õ©OElÎ<óÌpá…v5N­¬Ü†n¾ô¥/Å,¿õ­o…ë®»®•Ù·$¯þÿÆ x`Ø`ƒ ÂĉÇsçÎ =öXœ÷+K,±Dt«\o½õ✄‚Ï<yöÙg‡«®ºªpñk¬±Føô§?[l±EîÁââk_ûZ|á_äbÎ Vt¿üå/G%$Mòüóχ¯|å+ ­†Š°IÑlÍñ·¿ýí°êª«úæ€hM¦rÌ„ ð|ãß<{¿üàK=càm:I6(µßÿþ÷#©’׿~ˆ®zœvÄG2›³î™U!IþQ™Ã™Ë!†j ÈÇ?þñð–·¼¥š 2廌¼ûÝïŽy÷@N~øá¥Çó+¤Tž@@4"F¬KìÉ'ŸŒYðÉXnF,ß2„  „2eÊ”ÓÍâ r~£CŒ‚ë#"l } ! ºŽ6C† ÃÆ-†/±B9aõ a³LDeþKO‡9Óï óž,,xáɰ`no›"wËPèO š%mz‘°Á’Âb™e^y6æÍ›07n\3fLuè”%Iª76ñÓtLÔ!l¼”© +»´Ï”™^x!*M¾¬>“¦ˆ Ì@,½ôÒ19Ê/ôXÙ!Ä 2–e-“DØériÞð†7„#<2Þô“Ÿü¤cîP8Ø `ˆ äøCG­c¡uþõaó³Ÿý,Œ?>Ö"å–[o ì·þúëG+Bæ Àï}ï{ñ{Ö¿7½éMÑzÅæÈuHÉGy$<óÌ3qŽbžcn2ˆ€|Àê&KVXa…pÜqÇ‚ {)KØànöùϾš:ìþøÆ7¾1Zíp·˜¢qÿö·¿=&§½7ÞxcxðÁ#cäÁø’K.)šm5+ûÛߪc·z±‰/–oÂÆ\¢øMÆÅˆ>ífÙxã«.Q6©ð{þë_ÿ:ža“¢£c! „@w"ÐvÂfØè%Ãð¥V cÖÙ1Œ^uËLf=ð÷ðâ]—†yÏ>æÏš‘™F'[‡@»›z.Q¾Í6½HØì´ÓNá½ï}o„è¶Ûn ßýîw«p¡°ªŠ"Àªk§Åá¨£ŽŠ&ï”ÉK«‘.e›cŽ9&¬¶Új±ÚW^ye`Œ‚¤ÁA1*âS¿ë®»†Ýv«ì^V‘G}4º}Xì@ÜÉüãq5;ü'¦ P%’avÏXB¹Ûc=b<ˆ·7•t°6€÷óŸÿ<>c­p+iª32nîÂfÓM7 G}t¬ Ä8îBÞâ…xÄ-Aˆi³÷Þ{Çïé?îÛf›mâiÆ6óÚïÿû4YQäèó;ï¼3Ö=M‹µ®MÄÚ!°%ÏQ-!Èç/ù˨àÃç>÷¹Zɫ׈+¡‡2è•iKP´e›¢yZì“g·#ê{Çw¶êâw’ Ìô3xµÇ>ÀB„>fŽ©%[%vÂórß}÷ÕJÞ’ktŒEUê˜7†óÆ™J€Ï Ï>8b]WK°¼c\ |Ï DÍÃx@>ö±-‚1î@f•‚Õó"Ï\=¡ßp«¡®Ìä=mÚ´êmÞò—,, ‰ùÄ|V†°ÁÒÅâ­ðü@¥ÏÖ7Xá E­lüX7‹Um@å‹„œkÄ¢®,aÃXcÞ¢ïYÔÈË·Œ…M^^yçéWbÞ0'àêÛŠ˜dä…¥<»~¼äÕ#ïuwLš÷‚W/]¯@« :l1lj—š}µÒ¦ aÃË%;ð@°¢ Y@Ð\£¼0³[/˜©ä6ÜKÀB^¼‚6^vÙeÕÛÙ]kË-·Œ±U¬*¬ x9‡Ä±˜-vÓ‰'žÝŸ EÌ}Á®ñ‰5 V%È7¿ùÍÀ gÁ%å#ùH¼…è¬à£YiPHV¬¼ ÿ¼²¢J _^Љƒ%lPŒ,î±€²ÊÆüŸ—Q,x>üáÇüóþ¡¸Ð7æýYA „¹Ã;Ä4ô{=…6&|õ_³„ 1Y%çÅÝ„¹Œ•rV¼Óö{…•Ø.¸¡üÓN„{QÔÀ=o|ÅWŒ¤—Å&ðåЙ•üTq@5“ù?ýéO±, X?þñá§?ý©eWÊÛvÝ. 
3&óêÆÎP6öX™Owߢ¯°ºAË'þ…àñ„+öeêQ„°)›§µe—g×E//ì‚•¬rvAfÚÁý܇•FžI^?£ˆÓ_UÎÆ…YíÕrƒ``G)\gŠŠï?ˆgˆvå2K<òaž?ž,?þ÷O‚nƒÍw¾ó`ãš±ì ]~Ë8fîð‚{$c’yå™gÈ,IÇ5ò„ '®O–ð‚v~W!êÅXÆÚ„<öÛo¿>»»q‹Â+nBfùçËðÖ-)‰ añGpa+“ߘ²„ Ï}ƒäùfÜòÛG}!êͽäe1‘øþÉO~2’q|÷Âo.ýŠð»Áï[1b¥–… Ø3_Ð6WRxë kÍôÒòÍ"lèkÈL°à>Æ}ƒõë‡>ô¡Xý4p÷¹çžçy~_ Ç36V­½xŒ·,RÐÒä}Ò.êÄñ‰¸cDÊ/ÂY‹aJÚä6f C>(?^)C)à/K|PnÍR%+mzî£ýhxýë_OCB¥+¥iúôå ¢†—>È¢¬À‰¬¶Bð¢GÔe›ª FVvmKѼ`³(xï|ç;cõÚ‡2oî ?úÑ2w#aÅÕÚˆbÅN_E¥Â²ÁwÌ*— È.¯°¢(h×í“>'8¨á´óĹ€xñ‘]³OúË-?nPþmÇ”tâ yñ„ „Æî»ïî/÷ùŽÒ )“’B$¢l—ÜLR+Ÿ7mÃ2É‹'l|ZŸÆ¾gÕ£aÓHž”¹qTKØ)ç„Nè“„gçÙ+`}TP8±¬ð‚rMWÛÆ_³ï騲ó|þæ7¿‰ebUÈÎH©àÚÂîmsñnŠˆÇë¢Zh³êçÇ¿'lÀ &Ož«ùAZ¶WF°°;­2§®X×d J-ãšß»<å5ë>;IcÊ7ó–·Ü`Ž2‹%¯ÄCÀ1¯3ORÊe¾Å˜/`oäµÍ¤°ü9övL}!l<áDÌo…hmäÓܦ˜Þÿþ÷ûK™ß!ݘsø=­µk“7æ42—#Vò:HZs^eù¦„ „õyÛ.W,”@È!)q÷«_ý*ØÌ¯XyåÕ¬3Än+#Œÿ¼ßòaŒSW~£M|ä÷¼ç=vºú)¦ …¾!  m#lF,³jXò͇¶®1ݲ™qíÉaîÓÅWøì^}C U„ ¥·K›f¬lȧ iÓaCÊâÅ_M§·Új«¸Õ¥½dñ"ê·ƒÎ"lx·X,X%àêa ¹3Av\zé¥N^.yY÷ÁË6Ä>8±¢< ­¬¶a¥ Õˆ!ƒ›ÐA+^TyqgE/Ð!m#„ 7Š ˜ä­à“Z„Ü»ï~ÅÊ.žHþa9c+Ǽd§Éi+ÕHJªÅ“5þ5JØ ÀYÀößÿþw nn+ĹÀõ…z!>^„WX¹Fc…‚õÐ䊲Š2Çj2’*Œ]H+ ž áA™Xj1·Þzëj| ÜRX7ñ„#*å¢X2°žá9±¸I(_¬üs?mæA!dü›Bmyš{÷š%]ãÓ+ü“ìn¾ùfã3„ÂÓh=j6æ YpÖYgUû“1É‚2!C€nëëTY6ŽZÂÆêÎÜÍ\ÀüÅüË84ë;~§,.’¥¯õéÛÈü¼Ÿ0¾±àeQI‰66µPÕ5! „ÀÀD m„ÍÈ‰ë† ;|% 1ª2 +;EM¿ì‹aδ;KݧÄŰÙ,E×çb«ÇS&âO÷ùnnQœìiÓ(aÊ% «W(Qˆ±˜áe—aïŸ6Xu`Ý Pÿîw¿‹ßퟹõðÒ‡R’’˜6iS”°¡^¼P›ÕJ/›[ʞº&y²‰}S–Ór!lpi ðe-²É[ÄðR ©–'(À˜Ü#y;\CAŲ¨EºT%llµ•üPÜØÖÖ Ä‹Y_/ãñ +Ê –*>. 
„äc ”Z[¥ç„{& Œc³Êð+Ä)aƒkÊF*Ö.òÅZ榛nê“Ä s2«Ý¦¸å)›^áGù`5>˽ªÑzÔ"lÍ“@àF>éâATµí˜i¿)Š.<{Vô½,ÚŒ´ØAIC¼ÛQJ¼qç׈ƒ<—:âE!g^¼¢‡UZêºæÓúï¾ÿ8ñá…}—]v‰§R ?þÛŒqæ9H«æh/XDØîG(³Œ{o‘BZ?.»Y$•ÏÓ¾C¤ÐGF‚B@>y1ÒÂÄáb\ (ì< Vànʵ'lHéÄïñ¿þõ¯*AÄùT¬¼¼g(MÏñÙBë#ð©e ããùàb”·sybEQ‰¤®\ñ¤ûW tIùjÏKacsJJVXãÊ †å ›X¾Fذ£îÍÌ­<´?ý}-JØ0gc¹Þ&GŒ[â·ÕžiK“õ Ñf®ÑÔ‹ù$uë5BŽûý¼ëŸcYØd¡«sB@‡@Û›fÜdÌjcàÁ90jÜJ†·ƒ´©Eþ4JØ@d½ŒBb@f¤j=aÃvÛ¸ !y„/ùlÏ"fÊx¼áÕ¼Ä"E Oò¤/ŸäƒB`/ƒ§‚U‹_1åÅŽô)‰aÖ7sæÌ "yÒaƒe„!«†Yn¦ÈBx7³4=Š).6i¥éxQGÁÀêÄ”Ù4MÖq#„ «íŒ¢Î”É4,U `Pº­^aÍ[iÇj‚%U<½»+ï(t©l´ÑFU…÷ÏþsUñ„ dOV¿SWsdü 'ßvvåâ9ób®Œy³Ôñ׽Ÿ§ 5S<¦™<}ý³¾C°2î.”(ÄcÁ€EõÄ+a(ß^)´{Í¥‰1!ã‰iÒxÅ<+ˆ.iP`CeâmøþÃò·½,ÉSàýø‡°·¹Öcçó„,³UÄ*aëíT˜sp=DOŸ›4­?ö®PY–FÏ6{­})©‚ÅÅîI #Ñê¹ 5BØØs—d¾­|g<™b_ÏŽßIH „X\XAå‰wç°ƒH,*F¬äÍX&Ñ¿ô+ãšù´ˆX¾2`j‹5"Œ;H¨TŠ6ÞuÓçáÉTˆ?ÀzâwÚb gí<Æ84×J,oŒTôs…õ«/wöˆè»B ûha# ›îíüV6´´U¤¹F-±é^a\å/K%lxɇÀHK"‹ÏABÜÄŸ>‹4á:±qìE쪫®ê£Äßo»!lX­Ãô!~/÷©`a‚[Gž z%Ù¬SR·(k+® ¦hfåÙaÃj9ñQxaGÁÉ’ÉK'SX=M­Sü=¾Í©¥€¥µ6ñrnî v­Ög#„w‡ÂjÃ^ˆk•c׼š÷Ò6(Kˆw[@Áb·Ö*: +õáÓ[gxÂ'ÕTÞö¶·E¥€óÿùÏâ6éiŽ-(gjö<"Ë+ü>ˆ/§™zä6ÍäéëÆw\w&ŽUDŸãäÖ[oíó좀aù„0á~á€ìÉÕ˜àÕX¼˜k…¹úùë|‡À0KÆKJN3ŸXœ(”ºÔR*ͯè±ï?ž[Há,4EyE°¢²ÀÆ~üCXܘZV'ž  ü<Ò ¬è‹¢„wCÁ"B-%Ƕ®¸Ÿ1¥yA’Èê‘6X‚àâRÏZ¥ÂƬÇÒ2Ó¾Á: Ë.Ä[à¥é8æÝÁ,V_6gg¥5ë!®aÝ^Eň•<ÂÆ“äËÎI?f€>­}‡H‚hGøÍ΋?déý§ÇF„GFß…€¶6ŠaÓ½ƒ¢„ ­m–´)BÖPN#„M­ø)¼Y\¬˜JÉYŸ†ïYsNš&ïØˆ•<ÂËI~K³âñ›G<#0K·w·|Óréët·%KS„°Ü3ÒÚîóŸfäÇŒ¿ž~·ÀμodÅýJÓûc6 }B@ôm#l´KT÷v6´¸QÒ¦(YC6ÜÇ séÁª3Ê â OØ``;¨ðBˆvjÙaéó,:P¼xã³aãƒêRdMºÊ+[ùùcýiçü'Û™§ «¨¬.²ª‹dqQNPbkI#„ •¹/äõfù¶Ê›—Æê5iÒ¤£€ã4P´¥™ì,v< g×k}6BØxwVG‰»QT¼ÂÚm„¯ã—¹,Áb+¬¨¼kñ@XéGx.ˆ‘ŠWøó›fê‘GØ4“'sÖƒ&(ÆàiÅ'„’6œã¹eõë¸,¥‹ˆaòô+ôÜëãqlÖÔ‡ë(«é.OŒ-‹Ki‹µ`+Ä÷_­^ñõn¾Òú`™âÉ+»Î6ÑÌEˆ˜vÍšâ[´ðnA@ÌÌÑY‚;&„[š§ÅUÉŠ¹c„KJØØ=56yb÷§îVyé9ï‰ú(Ï É,aÒöäåmD ¿-Fœg¥-JìdÝkÄJaÃ=<X±ñûu¿«^ÀúðŠ *Ö4&–¯Û'$9¸Yn~ܦHfÅTÏí¬,acîuEûÄÚÁ§†¾ ! 
z¶6öÃ9bÙµÂø-*¼S;D=wÝiaîS÷Dtù±’´Sð[t8«veI›2d å5JØ ¤ÜsÏ+cË×›˜ ìÁxó«dFÀ@x@䘥÷’YªX^æúà ?JHº:ë·üÎ#l|0S”_Èš<ÍÊ-ûÉ 1@QÀyIååâ¬@ÊiÞ6lGÎ &rÁ„ /¼0Í6 €¡¸Ö‹¡ÃÌ/æ’/ˆÝ8lÈ”¬Ø.‹TâÕ6Þ½E¥ÖŽ/i¹^a-CØYÔZ¹¯z.Qy6;î¸cuW`5mC­cSܼ;–Oïþ<¦™zä6Íäé•aÈ\9ï¶à‰ª,ÂÆ·Ÿ˜Rì’ÆsI°Q”yS0­Ÿ™Êî¸cùáiî“|Ï#$,}ÑOßµ\¢˜Ë6ß|ó˜­wAñãŸqÈÜŠ›œŒÅ{ñÖ0é®S–Ž9¸ÿµ_ۼϜ a–'ž@#è·ÍÏŒܹÈk7³Ø`Nç·ñÏéÎ;ï\ šŽë±¶ò¤ÂÆ»ÀAÌßxã™ÙiTtlY°[0%@vÉá-ë|€õÌ dœ´q_‹°ñ·ÑǸŒî¶Ûnaƒ 6¨’7Œ%úÓÄòåÌù 7Â4Ï-­aSk|ùñ’7Zýì“1¿é¦›ÆC~Ë꽫Ù}|аñhè»B 7h a4ü€96Œ|Ízaüfû…áK½²–Û¼gÏÝpf˜óÄaáœâ V^Zov6Ô®(iS–¬!ïF bÓ 4x‹ã+ñØ2;µ01ÂÓ|¬4ÓTÅ:I]ŸYß}\ŒZV)Y÷Ö:çûyÍÇÌò÷å¹èdO.-V[ÞUГ XRT!žS ¿]užB;©¹ÃB´sÌ1‹C¾ öÝ,&  ۹۶Ͷ54;Z1™Øî‚æXDA¤ð×’F¿e}žûWÌ9H½ÀÇ1QåŸEtå•WfÆ<³Ø<ÜCÀ\,ªÊˆ+E Ÿ·øí 2ÒX¾ölá2Æo“YÊñ›n‹–g†´yñÇ|ÐaêãÊUOFÌB:Òÿ©`¥G<7!â÷L™2%&a“"¥c! „ÀÀG m„A3lô’²f•0fÃèU·´Ó}>g=ð÷ðâ]—†yÏ>æÏšÑçšZ@'j]´i„¬!ßF îE±çåb€Õ^Lá Œ`…ðÇ?þ1~ç_JØpŽÕ8È{ÁcµÑo%ŽípBz‚ûB!( (Ü›6ÄYà^{±§lHŸ,!^IC'+]­sÇ{l5¸'éXù«@ÒòªEØ üØJuJf™ù@h`i3¹âºD»±H@P*§N¿óψˆˆ#m°H8øàƒc:H-Ú&¼Ä’AŽH‘¬6âÅœ6deÄßé?bYx¬áåñJ|–Âêïã{V ηj[ï<†2P³Œ_ȱ4X-cåÅØ=ô… ˜KgÅñ aC~Ö#°i&OOØ0æý³ÊX&9Ê âûš¶bÙS3œUQb/¿üò˜Æþ±•µȾ喛+nWÇÅK~ìä­ÒóY`ꬭÑ!iQfóÈ žâe`¼ˆs­Œ4JØøvÊü÷¿ÿãRð²Nà^\ìnXlŒä)¬¾Îy„YKÙØAÙ&;¸Pu²kTˆ°W^üMŠ&­_íe¼c•…‡²¹Å[Ħ¦J÷Óï<H–Wøk6Ö£aÓhžÞ‚ë¬&PöÁœç°¸2ñ„ d.äÂÁ]ŒqBkÁ¿m©ãŽ;®JÓÏX¨ØNQ¶lÅôãû*»ð¬T‰í„¤JjñV¤·˜<äK“¹¤ÔË«‘ëÖ7Y„]#ß´ßÀÅ)-Ÿ1Cð;eâ‰EÎ¥nMŒEˆ²sŽå_ï³aýEæYƱƒqâÄêVµ‚ÅZ^¸þ‹)UJp8¿¢XLKoŸ¯ô»'lÈ›y2Û¬®Hb‰åVB©Ôÿ…æþHݨgÚFð€Oȳ^3…—û¬ý”_–°pglz¡,È'{ÎqoK‰}Ÿ¾™ïæˆ5KJ®×ë7êyÔQŸ«Ì‘/Rò o=c‰ÌŠ&l€ðe^JÇ"÷CÜAbyWA˷ȧ+Y6v?.m¸ ™Õ™‡¤ç™ƒè3Z»fùbQ•º3^½›­>®Tê.en°XÏ0Ç2Ϥä¿EÔ%%Q­Nµ>©Ä`‰ñŒU/ 6Ö-{9ac¨èS!0°è8a³âAÿ[ºGOßy`#8k_”°áE…º—=( úŠ|³ÍÅjäò„Å*J³„M£åÖ»¥…!Î/¼¼X!¬XbÏK.ÂË,/µ’îC†~ó+ùÔ’çŒþF gE7UɳÞ=#“+qpƒ‚¼‚ôV5õî/rmtQ ÞI€ìzbgóâØÔ»?ëz#õÈÊÇŸk$Ov\óÀÅ· ÞÜKPmÆDCÑ{q"VÊ*Öz(m6×øöØw,›ˆëTÆiŠÖƒüSÂEAqå…eµ•ãŸv£°#ÄôñÖ!ñä«ÿl jˆb­´Z Ð íl wpÃu w5êd®²ÉÔËXþ´ZŠôu@ñgN>ˆ‹ZYtá·ØI`À"¿}ŒÉfĈ•Z„åÅu€Øg>J‰iK׎OOØXŒ%âñLC¶+ë›f…¼Xè€dÇ’1ߨȦQätŸB ÿaÓØ÷[ÉE ›í¶Û.ð"8e\Åõgñ‰ýVß´àn%lXÉ$°'/W V¼À™¿=çQ®xÉ—t¸¡Ta}Å(’Ö à·ÐNc˜´¦å’‡hf͇un™­”<¦•e¤yA`Í‚ó,.©©ëwšÂ­K’v Á´±3×­¼r¨«·ôÉK7ØÏ—!lú«,¦?ëS¤l6EPR! „@w! 
¦»ú£#µ)JØ4•ô;†lf/·yGêV¤n%l¨;«Ø¸,°š™%΄ԑt'¬˜c¡BÀbb²HZ‡€Yy° ‡¤3˜;ָܕ±ž)RÃþ l¨VŠX2 !ì>Åsç<ö#׈Õn‹Fˆ1¶sÇ5 +Ž-¨:VKÔ«–vXØDzèŸ6¸lay†`©åƒÒwCs aƒcTd—ED1l" ú'„€èzDØt}µ¾‚E ›I•]G0;Ÿ6{Lxtå}Z_‘s,CØàâ€Y>nõÌ¿¬Næm]¸I€!.1”»n’îEÀâ×Áo/Þ½585óL Xšçià´f`Ô”¹Ï¶–f7VK6¸ësÌ1ÕȲÚEüÒÔÚ*ë>ë_Œ°ñµHwCò×úëû@!l Œ•c*"lRDt,„€èNDØtg¿´µVE *anQÝdeS†°i+ʼ§0ë¹Cµ¯[‰ãÄ®=æ=çœsÚWrް­;qµ°NÀ­B\ »U¸eú<·Új«¸•6V-ĶA±Ÿ:õáŠ5Ë]@ÀŒiâë¥ï#€õ®;^ìËŽHÝ$Æ=+ 3Î8£›ªÖ§.µße—]úœãàÒK/ W_}õ"çuB! º 6ÝÕ©MÂÆ¬l¨Øm£¶ sǯّ:Ö*D„M-tt­Œ¬á^Y×4‚ îB@! „€B Õˆ°i5¢ ¿2„ Ía›Vv$AºÁÒF„Mì ýkž¬a7›NºÎµ¨ ÊF! „€B@D@„Mvj½&•%lÈÏ“6/Ž˜œ·|˜=vrX8jB˶ü®Wo».ÂÆÐg£°[_”ODdM£hê>! „€B@! Ú€›v Úåy6BØÐ$ܣؕƒ­¾%B W f ¡d¸WzTíB@! „€½€›ÞèÇR­h”°±B n&Nœ°R`ëT¶‰”‚Ûü²s׌3b€T5¥çTO! „€B@!0¸a3¸ú;¶¶YÂfB¦& ! „€B@! „€è("l: ww&¦;úAµB@! „€B@!‡€›Lž<¹Ú x Ü~ûíÕc}B@! „€B@t "lº¥':X²„'kî²q˜½Üæ¬í¢E‰°Y)‡À*«¬R%nDÚ”ÃN©…€B@! „€è "l:ƒsW•R†°Á jã7Žõ¿mÔ6aîø5û½-"lú½ z¢ž´¹ùæ›åÕ½ªF! „€B@ÞA@„Mïôeá–”!l¶Ûn»³¦,k¬"l }6‹€‘6Ä´¹òÊ+›ÍN÷ ! „€B@! Z†€›–A9p2*JؘuÍ‹#&†»—Þ­k(¦kº¢'*‚±mdeÓÝ©F! „€B@žA@„MÏteñ†%l6Ûl³°üòˇ¢Ö5³¿-³‹-¿AæùFOаi9Ý—…€YÙ<þøãá†nÈJ¢sB@! „€B@Ž# ¦ã÷E s‡š2®b]³øÄÜŠCÔL¿ä¨Üë\Xbӽ¸Ê_+D„M+PT†»Gm´ÑFAnQ†ˆ>…€B@! „€èDØtC/t¸E ›wÞ9 2$ܸìA¹[wÏœrnx¾ò‡`I“’2s*dŽ¿>a§¯7ÝZ6MC¨ ÞúÖ·†… †‹/¾8¹¢C! „€B@! „@ÿ ¦pï×R‹6ïz×»b=§L<$·¾ž¾s¼¶âAµ]#v uš%mDØäv‡.4ˆ„ rÑE5˜C¹ÛvÙe—@Œ¨'Ÿ|2üú׿.wóK½ß~û…qãÆ…¿þõ¯áŽ;îèHí_÷º×…-¶Ø"–uê©§†ùóçw¤Ün(ÄÆÖ-·Üþþ÷¿wC•©ƒÕq0ŒÿEß#FŒ ,hù¸=zt´DlEþïÿþ/ :´OVÓ§Oçw^Ÿse>øà@û‰Ivíµ×ºz|ìc‹sògœn¿ýöB÷õW¢M6Ù$ì³Ï>áþûï'Ÿ|r¬Æ:묶ÝvÛEªÄÀ\ B@‹@Ç ›%ßrxX0kFDlèè%ÃŒk~0pÑ 5oac®PæîäÝ¢²¬mZEÚtаAÑ9rd¸úê«Ã/~ñ‹Žöök_ûÚ0vìØðüóÏÇ—²ŽžÖl]ˆË$ñp;â%ó’K. óæÍKJ*vˆÕ×;ÞñްÖZkEŒžxâ‰pÍ5ׄ{î¹§X©:MØüô§? 
¸b/Þ½*ô rÄG„x #MýÌg>S%lößÿ0sæÌŽ”Û …€7¸CRxà]Ùöþÿ(ôŸúÔ§‚ýþà?(56¹ýõ×l°AXyå•#ÆÌCŒïÿûßÑZ¯ÌX ^Ü^{í-ZɃú”•¥—^:|⟈óâðáÃãísçÎ 7Þxc8餓&Z ;ì°°ì²Ë†aÆŶ½ôÒKÑñüóÏ/[Íjúßþö·ÕïöeöìÙ;näÓò½ë®»Â¾ð…BY¼ûÝï®ÎÃÌÌÝ,çœsN5jT¬"ï)W\qE8蠃Ž;î¸Hµ!ÊøÃ.r^'„€B`à ÐQÂfèȱaÄ2«öAgîÓ„s^èsNíEÀ^RyÁ¬%õ,lŒ€Áb‚ƬmÈ3Ï’ÆîÉ»^«>v­S„Íé§Ÿ_P¯»îºðãÿØŠoë'ѧ?ýé°æškÆrx1>ôÐCÛZf^æ­¨Ëá‡X L²æG?úQ˜2eJz©æñk¬ñYl±ÅI÷ßÿþ7|ík_+­,‘‘›EàlúJíYgW»éçã?¾é<‹f0˜ ¬Î>ûìÀ3‚¥À—¾ô¥¢°u,]6½ÇsLÀZÄä»ßýn!kŒ|àßŬùÇò‚(wi,ˆj äóÇ?þñð–·¼¥šìÙgŸÊwõD/Ì‹_ÿú×#á“•"„¹ –2≌¬û{챘/î¤eňHkÉÏo|ãe³ê“Þò-CØìºë®K@d 6§œrJ¸êª«â¢¿LX(ADØ"úB@ \:BØ`I3|ìraäÄuÃÈe×ÃÆLˆˆÍqz˜óÔ½aδ;üž¬ZÞ \8FÍû“°¡fI›^%l iXõõÊ@6­¨ &ïoxÃâCÁ ý´iÓ⪠–%ç X ZŠ+¼¤·Õã^x!Z Mœ81käqï½÷Æ4EòóiDØx4ZóEt«­¶Š™pÀ±¯Z“sý\3a: ï}ï{#PG}tøÏþS´¦è/Âæƒü`$\ J¼Ô#lVZi¥HòL˜ðÊ» ÷ΪéUv–{ôÑGÃøñK„e–™V\qÅ*iÂüayá…ú¢ªßWXa…pÜqÇEK»êÉÊ—²„ –˜àióâsÏ=þö·¿EëPæ5³Ä`¾Är¤(¹‚[áç?ÿùjÕ°üxÅRòo|cµ¸Û|å+_©¦+úňêúýï¿èmuÓY¾eHN¬ˆè¿3Ï<³c®›u““`ã7®ºDAؤB{ÌÕV„MŠŽŽ…€¶6¼ŒXâ5aäkÖc×Û9Œ˜°F&Bs§ß^¸ó’@€Ú¹Ï?Q}ÈL¬“M#Ð.¦žK”¯x3¤M/6ûî»oÿsVi1?ï¦uA1‹ Vw:ꨨˆ0¶ß~ûªÙûÓO?]eüØÈûΪøj«­/_y啳pd̘1‘¤Yb‰%âñ÷¾÷½Ò>û"l"t-û‡BL, Æ0«æw”ÁNØ,¾øâQñ¤ 1?÷¹Ïuþºeuš°an`>b^2Yj©¥âa-ÂëÈG#y®¿þú@zÜRÁªìï|gœßŒ@¹ì²ËÂi§Ö')ý!O,OHêÈqYÂ’ÿÍo~sÌ?}Ö¨ÏÏþóª5Ñ/ùËð›ßü¦O]ò¨3nVÈ_þò—èVeiq«?ø<òÈ#v¹Ð§+Ý@تðJ$Âfu–ª*„€(€@[ ›¡‹/F¯ü†0~ó…!#þg~œU¯…sg…ç®ÿY˜õÐõaÁKOg%ѹ!Ð.¦lõ%mZAØX<•3^‰§”U÷f]¢xi¢ÌÆk­jzr%€ø{î¹g`U·Ó„M«êòÙÏ~6ûaÅ’~/'œpBXn¹åâ)LùëÅŸA±BA@^|ñŸ^ýG< \É\ý ˆÊH+,£&Ož•@‚ëÖŠ›’¥°2^6Üpè°=ôÐC…«¢G¹(é?üpx¼²ê_k¼¥£Ôá¶öòË/‡[o½5½ƒb^ÏØÛ|°n0SþBš E•ÄzõÀR`ÕUW­X3Œíä¹Ê“¢„ ˜—m›•I¯»îº16ÈÝwß]sú‹û¦N­3,¿zŸX’Ñ~Æq p%©%Ä­ Ï ÷ØcZI[r åxWË,³L¬Ÿ¹¹dež5þ³Òå£,¬! 
°p©÷¼l½õÖ1¨,ùA cÙ‚•ÈN;í‹È#lsgT‚Ð26™›IaSO°za^„¾ÀÒÁäg?ûYÇãrûío»Jp–%lˆ#1Š@ÌAÐyyÛÛÞ>úÑÆSXÙ`eTO˜»¿úÕ¯Æd´?%¨°¾Á iÄʦ,a–ô÷wÞYÓZÏò-caQâÏþÚk¯çF,DkÍõ%²¿‰Ì匴Ëä#¦ ZJ+„€è~ÚBØÄU—¡ÃÃÈåÖ Knõ‰0|Ük !1oæaÆß¾æsÕ?ÿùÏ€5%mòbùf6¸è2WSgîƒØ7ÈnÜ×úÃ8>÷Üs£›2¿á¸¿ˆ06ÓÞšúayEߥ¿wÇ{l xן"lŠ ®4B@ƒ@[š?lÜraìú»U\¡v)…Æ wü!¼pûaþÌÚÁúJeªÄ}h5aÓLa*V–´)CØ Hl´ÑFqG"^šyIÎ^SÒ&°ùâ¿X%kX5õ;Hñ2l«¶i9(ÒV”,#,x)ÅìþÒK/­ÞbäC36ìq„ À¦'QŒXÙEþñÄÃVÕå§•€ÍC+Êà 7ÜPUšbAî/¢¸  üûd—¤úÕ»iAdÉûÞ÷¾èŽÀ5ˆº<2!ëÞF ”1&S.³òFæ%ß[˜ÂÊx žàž%XË,”-z½@†@@Áã¯Ùwˆ@/ô7D#/úÔ k“”°±´v=ý$¦;?e‰íbBÛ 8R±¼ëե୵ðýýïƒìú2ê6V¾¿ÇÏk[½ûh}ëŸe”'véÁB&O˜pm„ñ²ûî»ûS}¾cõ‘|$s…«3È $k,pÞú‰zCàÕ DÏ=ˆáÀ}X aù%\G¹L·I¶ñŸî’† D£õ7d.䆉'ìœÿIJm½õÖ‹§<À˜çüÅ_\M^°¨±€ÍìB9`‚¢ÏF6üð¬"ôÏ6å‚b»ùðÒéøC’¬†QFˆÿêW¿ªÆ-¡ /Ÿüä'«¿EÜ—˜¯,þØ!‡²Èo‡åmDPÞsné²>XñýäÓA†A‚xÃ_ç{Qdù¦„ Ö@ô›‘5ämä*óÌK*r„Åp¥Ÿ!Ò vòêCó»“EÞY^YŸŒ,8+M:ÇA™ë{ÞóžEna³$:!„€Ð´°9áµÑºfÄÒ“K4÷™_±²™^,i©Ì•8"Ð*†̈[ƒ¥M3V6äS†´i„°¡ …„—v̪yÙeL{C‰À2Á$‹°ñqTþô§?EwKÏ ®Åê@Ciãe”Ï·¿ýíÕ,Ò{ÂÆî÷Ÿ­ l°Ø°O\’pMò‹"æò«ŽO=õ”¿\ý^¶.fÝqÑE…ßýîwÕ¼ü³ÂÈ@A¨%˜ú³» ¸æ­äb½À 3‚«éE¥†—x”0³pg”Sˆ2êcDÌ}÷Ý1¶ú˜ÂjÇX\]~ùåq[`¶ø…|²eúÍ.E „ˆ±ë(©(”ôck$‹Ÿ‘*¼¦h[¹(ÿú׿"iÆ9òA¼‚‡¥c ‚]²e´Ÿ}ßùÎwâ=ö…È,¦ L‰µë|©Jd˜z%/êA@±š0 °Tð–µ›FÛF¿˜«ã±Í–ÉàŽ‹‚âæ­)¶Ùf›ª +æ“(“`Ip`ú¹å–›+Á[‹ßùç•qÈ4ÚÇê?GôŸãŽy'%õÀ€þHc›põµ8S3ÆÒ9‚óY’ö ;u£Ž´‡¾1‡kÔ…×ÄÆ¿'l°ôbÞ°Ý›˜7,N÷aQfnã¾GyŒaž_,|l}ÿQ?”ë4æÄ”¹±¥>6þ°ô< ²¦RW7S¼­|oJD’Æjç\Cê6€b>>‹' H‰Sƒ’Î3Áfâ Ȫí¶Û.>û,yÒaƒ–Tåx+>_Ž)EÀvžßZbã&%Ó{|Àc¬‘RKª4½?¶þÍë'sEb~J­ô÷ü~@¥Vœ–¯Í«£¸h2BªQO®y)JØ0^éCp18‚4²…È.ÚTDì'mÖ3ìÇèÍ7ßçuÒŠ°‰B`ð Ð6¦7³Ú<ÝÐÙ–¶’°¡æí mj‘?6(YY/”(‚˜¿/¨¼ÌøjœÚå Hb yd ÅÈ‘#ãJ* s*X=`BŽ4CØðbˆ¢—'(Ï(QñtÌ¥%ÖÜ¢¼;Ô\ÐÇ‚#Í·,aƒ5–ÊjªÈYþF`QW^Lk n(Gfå¦{úaÔ»ª¥iÓãF¯ˆç)K(‘Xq!gŸ}v´ á»)¬|'®QV]SàR³S²ÙVxŸÊÊ– |o»í¶ñJöŠ6õÇB$âž°‚@ €e*˜ð£”"÷‡}œôž¬ã<ë c$K±ËÊ«È9G¤…pd^ÌHY‹íƒK Å±±ñŸÞé…B*H­X;Þ/°¼k6ÞÂÉ×ÛžIæïÔÅÂÄ‚Çzò "8ÜÍl{m«ƒÿl„°¡æŠU‹8#¾‘{°Fjøòýw~qï«5OÞ»dyœ|^yß­yýD€_~#,F˜ÏË&O0ÃU׋åëÏñ=uGô׋6ü~†þ^ûnAõˆ9Kïç(~ÿ f‹Š›¢H)B 7ha3tô’añÕ·ã+¤My®BÖ¼tß_ÂY¯Äz(s¯ÒC ]„ ¥7JÚ%k(£†û0£·X›´k³dvæA¯tŸO½ïf)Áœä]’êÝWïºõéj)›6Î!¼ëŸç~æQÜ3yÆž pôØrÞ⪤1¸fâ-Nj¤¯GØX=B`k\œÈ ÜL~žW#l+lܧHÇùz–-6BH­öÑDº"»9AxÛîSy±®È‹6SÍÏQœ/"F¬Ôª·Ï ™—vÛm·@œ,Ã:u µ|¹—Øb´Ý¬tªBÚ¬Æm²gµÒëj"öò#7†™7W!kî 
çýoÇ‘š7ébC´›°¡REI›²d y7Jذ«‡ÅV!,ŽÿêWÃÐaÃ"Áâw¢1Â)‹C€Uüâ‘ÔõÉ2Xá@ à–ƒ‹xýJX§ Ü8€‹Y7/ž¼ÀN©œýaàˆõ”O‚³ý³‰•Çq–[”2Š9¸º ¾féDX˜âƒòÉ‹8Ÿ&ljrÙ>—:—‘F\Ɉ ä&Î äÊ+°ôÙ”Á²„ e™’T+¾D½ Ãµ”Æ©mKÍö[ÝS~=ᙢ¯bNdYX™Â_«¦d’«¬•ñ¼ºä6¶ (s?£³AH*È*¤acõf\œUÙ¶ÝvG²Ý¥|yl»že±gyÔúl†ôª•¯õiòKû Ã̽æÒÄ=6þ¤è§ ÉRzÁÔæ^”[Û:>ÞPùçwGã\=" acî\Þåk,¤—X—1×cÅh»Q6V–¸¥bQÅN|Ì» Ü“'6ž(€cL‰.žGæ[0.J.û-åó,‰ýŠxŒâ‰ÿŒX©×OYYy×7C–Îò5"×.æ#,ñ¬ÆŠ6Ü[$èpQ‹ïv—G$Q&}ʸc³ a2! „ÀàA ­„ 01:ŒXrRµê›ÃèÊßðq¯éƒî¼™O„Y\^®üÍ15,œ›o‚ßçF4Œ@'*W´i„¬!ßF îýïÿWÆQê7ß|ó¨˜Úî$¸¸ |˜¤„ ç!:ôh/õd»M,ql7)αódÂË:/ÕÜÛi†à¾( ^XŸ:uª?•ù½acÄ mCÁæÅálPÀàÄÊâìÐÅ1Š6„æ÷&lgmVN)!f–P¤¥”XH›É•`Ãàn– EÛeeòÙaã·ÆÜ…Œþ5AaE1E‘”q3Ý7…µÂÆ+Ë)*^¼âJ¹X6˜…“ÝËù< 8s™3 ©ÔZ W s á¹1%‰zx%’þÍÚ2¾H=PTpCB²xÎÓïf „»Ï’GØ4Ú6”>H[$+–ÅJ+­w˲yÁ6Xh@ÚâêB¬,?FÈÏóØÜ·ˆýD~ôÏnº­7®NXW€7ñ/Ž=öX²è#f•’&–ˆzƒ1ãÈëÚµZŸÖ¤¡~Äõ"(¬Æx#©SÖøG5K@îIwÛñVG\‡ÁbëvDÃJÇðçz=" aC_@JÐ>Æ¢ÅJñ½)Ç„þÁÑ\gí<÷ãþ®%µÜíxމƒÃoyš@ 1¿#`bÏ®]gž\}õÕã¡ßšyù2—B0›ë¿yÛûs7’*/VióÄæŒ¬~b³¸f¸’¥m"OÃ+µî±|!ÍØÎ!ˆºåÇqÚž¢„ $9;I2/šÐç<ï†ýiSD,¸3iýœa÷ú9Üì·R„!¤O! „Àà@ í„M„±¢¤ ³l¶øÒaøøâ'çç¿ôL˜÷Ücñsþ‹Oñæ78PïçVvа¡™y¤M£d y6BØ $ñ‡â”%Y±V²î]wÝuc,ÈòdÕÍ,'x‘å˜;ìe—{XÙd‡&”#VÏ;MØP,XíEXNƒfÆ ÿj6¼¨Û‹*JŠ˜‰ Ä90VÁÇ,\—ÀÊ‘’Æ^©E/ÚF°¡´§ïS”<⦔•Fú—ö³SÃŽI(Xý˜)¾­öZ½²V»fŸY.Q\#žÇ 'œP_a(=àÅÊ8¤—)®éj²)Ú`—GØPÊ?nJÊŸ+¸b-ƒ‚G,!¬µò!.„ïCΛ"Âj?10R)RÆ$ Ê‚.((ìô¾Jpã•&MŠ×R¥-°!q£mCAe "ô1ØBÞ‡7ÿ¼{åëû•çÎêùH… ýeeGâF1@ 0VtëkÛ]ˆkÌ3(ãP{Þø÷d#ycanGcÙ„µbžûÆÊÍ"ü}õÆJàÁϘ2”@ÂÂ"‡þ„à`¬/Œ@Ë<ƒ<“ô äA=1"+†íNE©E“ߊç‘]Ùxþ(¼lnãǾjåËsmqÃè_véb·2Æ-D„!’>{ñdF¬dõ¿£XQÙó®s‰c{™ç\*¼ü[| »fŠ6xÕ"lÈ-ÅÙò±OOLØ9>½»XVÐÒ¢õ€ C©DiÎÆ¸ ˜Ô"lm±?,8¹•ã?Qþlnñ¸`„‚›>þ^Ú wcÅMÑ VY]­%©õŠ¥õ`^|üΣèCJß´Û¶3Oïeœ1 Œ¼Ôÿì´dÖ[ÜŸ!fçyõcŸt×_} vöHàëP°!­w J‰ŸW+¾×"lìåd¹yr%¯.~LZšZù‚/$?VXy¸åYKƒDç¥÷çXÉë'Ü2½±¿×¾cÆøð$”å›6Ü㉈&b%AF!l jX,È›‹°º3 9«c½Ï"íLÇžoGF^X>"Xû(èp½^Ðu! „@w#ÐÂÆC0qÏÓ+;@=O =>L;ï Yß;€€)¶º”W$æàÈ”‰‡ä%)|Þ“6ÜÔ(Yý6¼L!(×¶Ó.y;°ÄÄ%þ±í4 ./€Y[Ó²ZøãÊj+&(O¦X”(¢+“‚'/«æv“V ˜M+VX\à¾kZžðBŒ²P«_XÝÅÒ‰mV±ò1tòòÎ;ß(aC~S1wš²Ê¦'l WpõÂÚÀ· w?ÜÁ¼…Œ•_oü3î o‘¬ñV.XZs s 1f ø¬}²žx±òÏN]d, ŸiﮘÇV¬ÏÒ81ñb“ÿÌ.+¨²Å¤¢ÁÞ»'Y±ôCjéÅ5~ /¿ürKZý¬—/c>WÙ’þ•ØLÕ+_¨'u)j•åïå»+y„ iˆ ÅØbž÷ÂÜŠ¥}“þîX¾XdÜ ¤)cÏ,MÍòÌ慎5±êLÌZ9×Dª”ÄÂ’ŠºxÒØî/òIŒ.Þ·×&ô5¿iÔ—ùÜ ‹#ÌˈŒ¾ ! 
zŽ6+tq$=}ç>Ç:h?E âTð~ã²…!C‡µ¤b¸B¬XY¥–Ve2o–°)SV™´¼t™¹>ñ P†LIb5aç ‹÷R¦ ¥m6ôÕÅ÷ŸÊ”F dÜ•P.èÛÔM¨L^eÒBØP.ÊKX6îÊä“—%gr%Naiî!(µ„{P@™?X}ϲ@«uÖ5”*à µV´³‘¶Q70‡ô…d2x3ð.)Ñ”Õn;Ùƒ% $.vSÅ®ûOÆÖfEðÇÉ2â ³ÔbULÈþI­çÊ䟕•à«(è(ÕY˜ûm®Swª¬<‹œcÜá.†Å$B¹àõ}O;©í¦ý…X®Ô[)R¶O1QQëÙ# ñÁx^©'„1A«3yR$_‚üX™qËX(…iFŒX©EØXþŒ-~CqÉû²V,–O#Ÿž°!Ø=Â\aÈüÁHc|5R÷ðnÆ‚X¥dTÑ|eaS)¥B@ DØ Œ~ji-‹6qäenʸŠëÏâ[Z‡f2ëVÂ% ÷^â^˜Y!ãåÎ|ñ9¢iÁ9–ô/(ßm´QÜN˜4’Ö €E ¸òà•Zµ¦å’…€'.jYdÝ[ä\aSä¾fÒø-Ùo¹åæŠUÒq}²CÑÅ2‚y˜1Wvg±>™e`A„µ¿‰õk \´$ù”!lòsiÿ•,¦ý¥6W‚›æðÓÝB@nC@„M·õHêS”°á•Ý;†lf/÷J`ÌT¯nÝJØPq¬pß`E2Kx‰‡Ô‘t¸Ü°"kñ@$­AK#ܺ 0!Â4î[ƒk‘\Ì®UÖMi™ýAØ`aäcq@bÅ‚Õó.„ n:XÅ´ZP„ 6Œu1Æ–­¸ºz•ÀÁŠ70vƒbm\h$ùaƒk.m®EÏ/¥ù+…°!VÖöÛoŒeÝßøÆø]1lšÊA!Ð߈°éïè‡ò‹6¼âC>mö˜ðèÊûôCM³‹,CØð’¿9îÍšpg×&û,D;…€!æÍ”QË4=;'m7¿†˜iÐäv—ÝëùWƒ ²X<ømÍ{½ÝýÙ>bZÈÌ àÝj×ÚÖ„ å¢`׬9—Jƒ$½®ãî@À_H8Æn7É@!l  ™˜Š› ! "l^Ÿ5]㢄 ™[T7YÙ”!lšKô4f]ÃʸܡÚÓÕÇsLÜ]å‚ .È –ÚžRo®léÍ.aYJq+Ùc=bPà—^zqפVä_+bÅàbdz‹%#äÍôéÓc,vôc»eI÷#@7,–¼`•ÄŽfÝ$”g·:¬€Î8ãŒnªZŸº;‰ Ó©\zé¥1wz^ÇB@!0pa3púªe5-Cؘ• …ß6j›0wüš-«G£‰°i9Ýç0²†s²®ñÈè»B@! „€Ý€€›nè…ס aCÕØƒÝYn°´a»Bÿš@À“5lÅÚIw¹&ª­[…€B@! „€Dˆ°DmM-KØpŸ'm^11<8où0{leëÐQZ¶å·Õ¯Þ§›zézì5~üø¸50ŸˆÈš,¤tN! „€B@n@@„M7ôB‡ëÐaCqbwŒ"Ûšv¸I*N”B€˜5VáR°)±B@! „€D@„MÁî–¢%l¬þ7'N X,Œ5*°…¤Dt3ì˜Ãn]3fÌÓ¦MQÓÍ¥º ! „€B@!a3B³„Í „LMB@! „€B@!ÐQDØtîî(L„Mwôƒj!„€B@! „€B 6yÈôðy6=ܹjšB@! „€B@ô"lz¢Ë5B„M9¼”Z! „€B@! „@§aÓiÄ» <6]Ð ª‚B@! „€B@ˆ°©N¯^aÓ«=«v ! „€B@! „@¯ ¦Wz²D;DØ”KI…€B@! „€B@ô"lúôþ.R„M÷€ÊB@! „€B@!P6µñéÉ«"lz²[Õ(! „€B@! „€è!DØôPgmŠ›¢H)B@! „€B@þA@„MÿàÞ¯¥Š°éWøU¸B@! „€B@ºˆ°© Qï%aÓ{}ª ! „€B@! „@o! ¦·ú³PkDØ‚I‰„€B@! „€B@ô"lú úþ+X„Mÿa¯’…€B@! „€B@A@„M”z,›ëP5G! „€B@! z6=×¥õ$¦>FJ!„€B@! „€B ?aÓŸè÷SÙ"lú x+„€B@! „€B  "l ÕKÉDØôRoª-B@! „€B@!Ћˆ°éÅ^­Ó¦n l6Ùd“°Ï>û„ûï¿?œ|òÉujÜ÷òСCÃÇ>ö±0iÒ¤pÆg„Ûo¿½o‚Ž^÷º×…-¶Ø"Þyê©§†ùóç7‹nÉC`¹å– ‡zhX°`A8餓ÂÓO?—Tç…€B@! „€B ‚@Ç ›1ëîÌšÁ:zÉðâ«#:Œ@ÂfôèÑáøãC† ÿûßÃoûÛ–ÔöœsÎ £FŠyA\qÅ…ó}÷»ßÉn˜9sfØÿý ß›—ð3ŸùL•°!?ò•´ÆÐÚk¯3¼é¦›ÂW¿úÕÖe®œ„€B@! „€=ˆ@Ç›!C‡‡¡c–ÃÇNì㼦…/>.˜×ç¼Ú‡@Âf饗§vZ¬Ìm·ÝŽ=öØ–TÌ6§œrJ¸êª«ªùn»í¶á­o}k<þö·¿½y²ë®»†ýöÛ/^aS…­«¿@Ь³Î:±ŽS¦L‰$ Ux•UV úЇâá\ t$B@! 
„€B@ÁŽ@G›ac&„K­F­ü†0r¹µÂ°±ËFÜç¿ðT˜óäÝá凯sŸ}(Ìqú`¿›7Þ¸êaãåsŸû\xýë_O}ò“Ÿ ?ü°¿p‰:ì°ÃŠ+®Î<óÌpÇwô¹ÞÈ,lA­ø=æ5oÞ¼è÷Ì3ÏTo†œ;üðÃãñ…^û´zQ_„€B@! „€ƒ¶6ÃǯF¯¶U»þ®aèbã2a^0{fxáö ìûÿæ=÷XflÝ@ØÔjM=¦ֽ^aÓ(rÍß'¦y •ƒB@! „€½‡@[ ,i_sû°Ä¦{Bîù)熗î¹"`y#ií&lp}1bD¸ë®»ÂܹsK7¤•„ V8'¾óÎ;ÃóÏ?Ÿ[—V6Xþ¬´ÒJñï¾ûî Ó¦MË-/½°ÄK„É“'‡Å_ñ°aÃb > ÒLðߢBÝÖ]wÝ@¾Ï>ûì"·-³Ì2a5Ö÷Þ{oCÁ„EØ,©N! „€B@! ÚtxȰ‘a±•6 Ko}D2â•à²õð^8ïåðÌ_¾f?:%,œ7§^r]oV6ßøÆ7¢¢ÎŽJüàÇ$(6yòÉ'׿üåðÄOØ©øI,œ 6Ø ’ï}ï{ã¹O<1¬¼òÊ}Òùv†zì±Ç"©qÖYgÅK×^{møîw¿ë“…‰'†/|á aùå—ïSÜq ¨Oº T3„ $Èg?ûÙÀNS¾í”iCü–<òæMozS8ðÀÃ’K.Ù§ Ô• Ï?üáûœç`Ï=÷ {ì±G<ÿÅ/~1¼ï}ï‹„ õ@ zˆ ô£ý(@¶|úÓŸ믿~¼fןzê©Xç4¸ò¹çž[l±ˆóOúÓ€KÚØ±c«÷Θ1#|ík_ RÄ¢î<šDt¦þõ¯]½‡/;í´S5NÍ·¾õ­pÝu×ÅcÎçÉo~ó›ðË_þ2ï²Î ! „€B@! z¶YØ [bù°ÄÆ{V,l¶+àK÷\ž¿ù¼0ÿùÇKݧÄÅhaC@àUW]5Ä)ÉÈ—_~¹zÙ¡}Ï{ÞÏCN¬°Â Õ4é—üãá‘G ãÆ‹ÛysÅÀÒ䨣ŽêCœØ5ûœ>}zøÄ'>fÍše§B3„áPÍ,ùqóÍo~3Üpà }®°µùÑG]³®×_}8á„úÜ·÷Þ{‡Ýwß=ž{衇A{³äOúSxÃÞ°di!k9ä0{öl;~õ«_Eë(¬‘ÆŒŒª&¨|¡?!Ì8àºÏ÷_üâáøCõœ ÁÑöá8ì°ÃÕ4é—ßÿþ÷áì³ÏNOëX! „€B@!0(ha3bÙ5ÃÒÛ|& ¯7ed^…¨yæ/ß sŸº§ÌmJ[V6V4V4_|qt?¢cß}÷Ê?×ÿñ„ï|ç;–4Z˜ 6>ìJµe[m\l“O>9Z{ðë¬Gj6?þñÄ Hî¹çž¸]8îE[o½uØrË-«Ö X’ø-Ê%l ‡6Ûl³XÄÇßþö·h³ÞzëÅòŒ€ÂòäýïLÇ?ˆ.ˆ#Dœ|Í5׈¯­¶Ú*¼ùÍoÇé/¹ä’ð³Ÿý¬z¯'l8‰Õ Ö(>ø`xûÛßï¯&®|˜ÂâW(‚=਒D?øÁÂÕW_]Mn„à[®c­D¿@âxPºüòËÃÈ‘#£ÕíBž{E ÇY„ ýˆEø±U;Â8¡½äÜ /¼¿ëŸB@! „€B`°!Ð6Âf±å7ÞùµPÑ ËaZQȧÿñóaöã·•»O© #Зvnò±QV_}õhYBÅØ‹ “, »V/†Maƒk;Ná–y듬MŽ<òÈx òçóŸÿ|õr#„ ñj¾ÿýïÇ<°¢Áj—-/\'rê©§FòƒïÞ*'Ë­ 2ë¸ãŽ‹m¤Âʰõ„ dÌÁ^zé%²Bi+‚5ÌG>ò‘>±g¶ß~ûxŽëLÖŽ=asþùç‡óÎ;ÓQp:ãŒ3ªdÏŸÿüçH¨Ùu>¹NÿPg\µøD²›x¡òO1l } ! „€B@! þ‡@{ ›¾þ¿’J|›~ÉQ"lJàU6i;ssIë‚k Š~jaÒÂ&-;ëØb´à„ňI#„ V!ûì³OÌ×\xRÁbè£ýh$9ˆIó—¿ü%&9çœs¢µÏ¬ ѲOÅ)K=ôÐ'†kO·ÞzkLæ ¬_ ‚¼l±ÅÑÅ‹sXîçÆËèÑ£«®F÷ß5-iŒ°h±ØBþÞÓN;-ZAq—¨4ó—¾ô¥°á†Æ[¨¿Å.aãQÔw! „€B@! „@}ÚFØŒ\ní°ôvG…a‹/S¿.Åü—žÏ\ùõ0çÉÿ¸³úÚJÚAØìµ×^}b¡X}-. 
((—k lp±š¤}Lbþ¤6ÚßEˆԮ^½šÝ 7„)Øœ={ÖvìØáNëëë³±±±ð÷êo,E`#G-ÁRÓ²(•õ~ü8wï%UÏjnnvç Úèè¨{ï—õVõ(íõã·bËz—C`ã/ËßÙ7á= € € &ø›4+×m°Ê b”^«Ö7¸O?̾²ÌÜkË•¡ôJ[¤6‡²8Œ¹¹9»qã†Ý¹sǦ¦¦,“ÉX¡À¦»»;[ª{vvÖ•ÙÖ¬m«åRÛ¶mËGØèf~¤¾ŽÛÍ›7­µµÕvïÞí^užŽ©’VØZZZLžªªªÜGÓÓÓîÚçÏŸ[WW—»¶ººÚ˯ôTn›oÞ¼Ùúûûݳ*øR€511á–³å— ýxE@@Ê]`Ͳ¢òï?D3 þǼ_#¤6*Ç­?àUvÚoÇw3X 6Zò4pé’Õ¯Kµ÷ÁÆÃ |T=*ªÀFߣ½rü0(ÿ»UEJ3m&''s©’•öÁÉNÿ¤[·nÙ¹sçüÊ®J”nhhÈqþƒŽŒŒØ•+Wüx € €¤Fà lR£›Ð]M`£ŠKáž"SS¿ÛéÓ½Ù§ ƒŠåª$…K‚òK_k)O[[›»×þýû³÷ ßh™Œf’„}Õça`ãïE£™.\/³ÚÚZÓ^69AÈÌÌŒ VN:e Ÿ6þ2¬Ã‡ÛÛ·ÅW)Ó,™'NXGGGÎwÊåÅ‹ÖÛÛkOž<ÉöÑÓÙÙé6Jö+[鸼4ÓDK»ò›?éäÉ“v÷îÝœS¶oßž y®]»f—/_Î9®ÿ„e½|¸ªkÿ«‰‚ZâóôéS{ô(Ø@»È¦ÀFˤ–èZ< |ÒÖôü7nt¡Â. € € V›Ž|)6)@@@#°Iá€À&…ƒÎ##€ € €””MI W4%°‰Æ‘» € € € —M\² ¾/M‚‡®!€ € € ؤðg@`“ÂAç‘@@@JJ€À¦¤†+šÎØDãÈ]@@@ˆK€À&.Ùß—À&ÁƒC×@@@lRø3 °Iá óÈ € € €%%@`SRÃMg l¢qä. € € €Ä%@`—l‚ïK`“àÁ¡k € € €6)üؤpÐyd@@@’ °)©áЦ³6Ñ8r@@@â °‰K6Á÷%°IðàÐ5@@@~ € € € €@Âl6 t@@@ °á7€ € € €$L€À&aBw@@@@~ € € € €@Âl6 t@@@ °á7€ € € €$L€À&aBw@@@@¼3ô‘÷ýdWIEND®B`‚monorepos-ex-easy-post-release.png000066400000000000000000004204441506116242600347160ustar00rootroot00000000000000python-semantic-release-10.4.1/docs/configuration/configuration-guides‰PNG  IHDRnˆ!¼Ï JiCCPICC ProfileH‰•WXSÉž[R!D@JèM‘@J-€ô"ˆJH„cBP±£‹ ®]D°¢« бaWÅîZ *+ëbÁ®¼ tÙW¾w¾oîýï?gþ9çܹe ·ó¥ÒT€\Iž,&ØŸ5.)™Eêd  ®|\Ê‰Š Ðηw7¡´kJ­öÿWÓŠä(ˆÓ„rA.ÄÀ›RYD)äͧæI•x5Ä:2 ÄUJœ¡ÂMJœ¦ÂWú|âb¸?€¬ÎçË2Ðè†<+_uè0[à$Š%ûAì“›;Yñ\ˆm œ“®Ôg§ý “ñ7Í´AM>?c«ré3r€X.ÍáOÿ?Ëñ¿-7G10‡5l꙲eΰnO²'‡)±:Ä$i‘k€âbaŸ¿33!ñ*ÔF çš&Äcä9±¼~>FȃØâtINDx¿Oaº8Hé뇖‰óxqëA\%’ÆöûœMŽ˜÷fºŒËéçŸóe}1(õ¿)²ã9*}L;SÄë×Ç 2ã!¦B/Nˆ€XâyvlX¿OJA&7bÀG¦ˆQæb±L$ öWéc¥é² ˜~ÿ¹òܱ™b^D?¾š—¢ªöDÀï‹æ‚u‹$œø‘|\ø@.BQ@ *wœ,’ÄǪx\Ošç£‹ÛIs¢úýqQN°’7ƒ8Nž;06?.N•>^$Í‹ŠSʼn—gñC£Tñà{A8à‚À ØÒÀdÄ­]õ]ðJÕø@2€8ô3#ûz$ð ÀŸ‰€|pœ_¯äCþëVɉ9ÕѤ÷÷)U²ÁSˆsAÈ׊>%É` à dÄÿˆˆ›æ›²ÿßóìw†™ð~F10#‹>àI $CˆAD[Ü÷Á½ðpxôƒÍgãy|÷'<%´nÚ w&‰ eC¢ Ú¡~P}Ò~¬n5]qܪCeœ‰ÜÎÃÁ}áÌ®åöÇ­¬ kˆöß2øáõûQœ((eÅb3t¤††ë Š²Ö?ÖGkÚ`½¹ƒ=CççþP}!<‡ õÄa°sØIìÖ„ÕvkÀZ°£J<¸âžô­¸ÙbúâɆ:C×Ì÷;«¬¤Ü©Æ©Ó鋪/O4-Où0r'K§ËÄ™y,übˆX<‰ÀqËÙÉÙå÷Gõz{Ý÷]A˜-ß¹ù¿à}¼··÷Èw.ô8ûÜá+áðwΆ ?-jœ?,PÈòU®<à›ƒŸ>}` Ì ÌǸ/àA(ˆq L„ÑgÂu.SÁL0°¬å`Ø ªÀn°Ôƒ&pœ—ÀpÜ…«§¼ÝàøŒ 
 ¡! D1A,{Äa#>H ŽÄ IH*’H2™” +‘rd RìC#'‘ Hryˆt"¯‘O(†ª£:¨j…ŽDÙ( CãÐ h:-@ KÑ2´Ý…Ö¡'ÑKè ´}ö`Sؘ)怱1.‰%cé˜ ›c¥X%V‹5Âû| kǺ°8gà,Ü®à<àSðÙø¼¯ÂëðÓø5ü!Þ#І{‚'GGÈ L%J Û ‡gà³ÔAxG$™Dk¢;|“ˆYÄÄ%Ä Ä=ÄÄ6âcb‰DÒ'Ù“¼I‘$>)TDZGÚE:NºJê } «‘MÈÎä r2YB.$—’w’‘¯’Ÿ‘?S4)–OJ$EH™NYFÙFi¤\¦tP>Sµ¨ÖToj5‹:ZF­¥ž¡Þ£¾QSS3SóP‹V«ÍU+SÛ«v^í¡ÚGumu;u®zŠºB}©úõêwÔßÐh4+š-™–G[J«¦¢= }Ð`h8jð4„s4*4ê4®j¼¤Sè–t}"½€^J?@¿LïÒ¤hZir5ùš³5+4kÞÒìÑbhÒŠÔÊÕZ¢µSë‚Ösm’¶•v ¶P{öVíSÚÜÁeóÛg:DkžN–N‰ÎnVn]m]ÝÝiººGuÛ™ÓŠÉcæ0—1÷3o2? 3Æ&¶xXí°«ÃÞë ×óÓéëíÑ»¡÷IŸ¥¨Ÿ­¿B¿^ÿ¾n`gm0Õ`£Áƒ®á:ý† †ß?ü7CÔÐÎ0Æp†áVÃÃ#c£`#©Ñ:£SF]ÆLc?ã,ãÕÆÇŒ;M&>&b“Õ&ÇMþ`é²8¬Vë4«ÛÔÐ4ÄTaºÅ´Õô³™µY¼Y¡Ù³ûæTs¶yºùjófón ‹±3-j,~³¤X²-3-×Zž³|oem•hµÐªÞê¹µž5ϺÀºÆúž ÍÆ×fŠM¥Íu[¢-Û6Ûvƒí;ÔÎÕ.Ó®Âî²=jïf/¶ß`ß6‚0Âc„dDåˆ[ê‡|‡‡‡ŽLÇpÇBÇzÇ—#-F&\1òÜÈoN®N9NÛœîŽÒ:ªpTã¨×ÎvÎç çë£i£ƒFÏÝ0ú•‹½‹Èe£ËmW†ëX×…®Í®_ÝÜÝdnµnîî©îëÝo±uØQì%ìó9M=Ý<ó<÷{þååà•íµÓëùë1¢1ÛÆ<ö6óæ{oñn÷aù¤úlöi÷5õåûVú>ò3÷úm÷{Ʊådqvq^ú;ùËüù¿çzrgqO`ÁÅ­Úñå‚Ì‚2‚j‚ºƒ]ƒgŸ!„„…¬¹Å3â xÕ¼îP÷ÐY¡§ÃÔÃbÃÊÃ…Û…ËÂÇ¢cCÇ®{/Â2BQ "y‘«"ïGYGM‰:MŒŽŠ®ˆ~3*ff̹XFì¤Ø±ïâüã–ÅÝ·‰WÄ7'ÐRªÞ'$®Ll7rܬq—’ ’ÄI ɤä„äíÉ=ãǯß‘âšR”rs‚õ„i.L4˜˜3ñè$ú$þ¤©„ÔÄÔ©_ø‘üJ~O/m}Z·€+X+x!ô®vмE+EÏÒ½ÓW¦?ÏðÎX•Ñ™é›YšÙ%æŠËů²B²6e½ÏŽÌÞ‘Ý›“˜³'—œ›š{X¢-É–œžlÅsÊš)ݲ0Ùv9"Ÿ oÈÓ?ú- ÅOЇù>ùù¦&L=0MkšdZËt»é‹§?+*øe>C0£y¦éÌy3ÎâÌÚ2™6»yŽùœs:æÏ­šG—=ï×B§Â•…oç'Îo\`´`î‚Ç?ÿTS¤Q$+ºµÐká¦Eø"ñ¢ÖÅ£¯[ü­XX|±Ä©¤´äËÁ’‹?ú¹ìçÞ¥éK[—¹-Û¸œ¸\²üæ ßU+µV¬|¼j쪺լÕū߮™´æB©Ké¦µÔµŠµíeáe ë,Ö-_÷¥<³üF…Åžõ†ë¯¿A¸áêF¿µ›Œ6•lú´Y¼ùö–à-u•V•¥[‰[ó·>Ý–°íÜ/ì_ª·l/Ùþu‡dG{ULÕéj÷êê†;—Õ 5ŠšÎ])»®ìØÝPëP»esOÉ^°W±÷}©ûnîÛß|€} ö åÁõ‡‡ŠëºéuÝõ™õí I m‡C77z5:âxdG“iSÅQݣˎQ-8Ö{¼àxÏ é‰®“'7Oj¾{jܩ맣O·ž ;sþlÐÙSç8玟÷>ßtÁóÂá‹ì‹õ—Ü.Õµ¸¶úÕõ×C­n­u—Ý/7\ñ¸ÒØ6¦íØUß«'¯\;{wýÒˆm7ãoÞ¾•r«ý¶ðöó;9w^ý–ÿÛç»sïîß×¼_úÀðAåï¶¿ïiwk?ú0àaË£ØGw ¿x"ò¥cÁSÚÓÒg&ϪŸ;?oê ê¼òÇø?:^H_|î*úSëÏõ/m^üËﯖîqݯd¯z_/y£ÿfÇ[—·Í=Q=Þå¾ûü¾øƒþ‡ªìç>%~zöyêÒ—²¯¶_¿…}»×›ÛÛ+åËø}¿PnmÒx½Z ¸o¤ŽWíû QíiûøOXµ‡ì37já?}tü»¹ÀÞmXA}z Q4â<:zô`ØËõí;•F„{ƒÍ¯i¹iàߘjOúCÜCÏ@©ê†žÿsƒ ‹v›ŠeXIfMM*>F(‡iN’†x n ˆASCIIScreenshotJÖyá pHYs%%IR$ð×iTXtXML:com.adobe.xmp 648 1134 Screenshot œfX¤iDOTD(DDÃÞÓ@IDATxì¸UÕ†w)$!!!†ZB'Ò›€€¨4¥ˆ(M±þŠ€A±Š*"ŠTPªHèÒ{U:¡HéÉÞ ë°ÎÎ̹3§ÝsïýÖóÜ;m×oï™3뛵Öîõ©O}j~! 
„€B@! „€B@´½Dܴݘ¨AB@! „€B@! „€HèµÂ +È⦇M†}èCI_}õÕÖsuW! „€B@! „@×B@ÄMׯ†´VÄMC`T!B@! „€B@! šŽ€ˆ›¦CÜ~ˆ¸i¿1Q‹„€B@! „€B@¤! â& •n~NÄM7`uO! „€B@! º "nºÍPæïˆˆ›üX)¥B@! „€B@ÎD@ÄMg¢ßIu‹¸é$àU­B@! „€B@‚ˆ¸)XwH.â¦;Œ¢ú „€B@! „€=7=a”£>Џ‰Ñ¡B@! „€B@6E@ÄM›L3›%⦙èªl! „€B@! „€C@ÄMã°ì2%‰¸é2C¥† ! „€B@! „@G@ÄMœ"nzà «ËB@! „€B@!Ð%qÓ%‡­¾F‹¸©?åB@! „€B@!Ð*DÜ´ é6ªGÄM †š"„€B@! „€B  "nª€Ó]/‰¸é®#«~ ! „€B@! „@wC@ÄMwÑýq“$%B@! „€B@!Ј¸iƒAhuDÜ´qÕ'„€B@! „€B 6DÜÔ†[—Î%â¦KŸ/„€B@! „€=7=h°­«"n m…€B@! „€B@´7"nÚ{|šÒ:7MU… ! „€B@! „€h8"niû(â¦ýÇH-B@! „€B@4 W\1ì¹çžá‰'žW\qE˜7o^£Š.TNŸ>}®»îVYe•pñŇ矾Pþžš¸S‰›Å_<ôêÕ+L:µÛá '„ýû—û5þ¸ãÚ¦Ÿ"nÊâ! „€B@! „@·F`Ùe— §vZ¹ÿùÏÂøñãËÇ­Ü9ñÄê«®Z®òË_þrxíµ×ÊÇÚIG ¥ÄÍ AƒÂÞ{ï6ÝtÓ0pàÀ„´¡Y³gϯ¿þzxê©§Âe—]Ö-X·¿þõ¯åþÑǯ|å+áÕW_e·ÓEÄM§ „€B@! „@vØa@\Ô´iÓ›o¾þ÷¿ÿ…k¯½6Ñ9ã4=åøÈ# ›m¶Y¹»óçÏ{ì±Gù¸•;±ž|à 7„ÓO?½•Mè’uµŒ¸ÙvÛml6Õ„ItÎ9ç„k®¹¦Z²¶¿OH7m?dj B@! „€]ßþö·aĈ¹Z‹ŽùÀ„Ÿþô§=’ÀÁpâßøF«É“'‡ƒ>¸|܈µÖZ+xàå¢^~ùåð³Ÿý¬|l;gŸ}v:t¨†“N:)Ü}÷Ýåcí¤#ÐâfŸ}ö »ï¾{z 2ÎÞ{ï½w£®*"nºêÈ©ÝB@! „€B@´;Eˆë˻ᄒõ­oõH× >ºñÆáÍisÆg„ÿþ÷¿KC¶;ï¼søâ¿X. ‹§ý÷ß¿|l;«¯¾zbбÄK„;î¸#i‹]Ó6¦7A‚iK³´yã7ÂŒéÓ襗)ŠåÏþsøË_þŸîÇ"nºÄ0©‘B@! „€B@tAb⫚9sæ$=éÛ·ofšam’YYº—¸éA4´«M'nâŠÖ?ú裠DÓK¤ÉV[m¾öµ¯Ùa²%öÍg?ûÙò9L¼`æLn½õÖ0eÊ”°Þzë…Í7ߘ譩¾r©¥– ÿøÇe ³’}/¯7[Y:ë +¬>ö±…áÇ'-7ß|sÒ'ð0!¾¬¹'a`±ÓN;Ù¥0kÖ¬pÝu×%ÇèèèѴ祗^ &LX(nëòË/Ö^{í°Î:ë„ 6Ø \ºüù矟“ KV?`crÛm·…·ß~ÛË[ ;vÜqÇ0vìØ$=í"ˆñM7Ý”èáå„n' “‘#GB·Ðâ1&Ä=êJÒTâ†ùÝï~WÇ;ï¼:è ÔåÇ n˜^Ž=öØdÉ2Î]xá…¡_¿~åË¿ùÍo?ºÅ[¬|îÒK/MÒqò…üžì)',íÀÊžyæ™å‰i×~ô£%7›cÂÅăp‰eæÌ™‰¹“ÑKLÜuÔQáûßÿ~èïVš²ô,Çvî¹çÚaÓ·"nš±*B@! „€B ‰T#n|µ¸ï` â…ÿ_øÂü©dm£6Zè<'æÎ¨3íÃ=:ê÷¾÷½0f̘Լœ¼êª«ÂïÿûÔëè‰ÄˆÉ’þóŸá¬³Îª¸œ¦³Ž5*`d`‚ñÁ7¿ùÍ„B7A'7·¦Áƒ‡?üáv)Ù‚Ãw¿ûÝqË-·ÜR±BUšça‘\µ3*2ùÕ¯~•1vÌv›m¶ _úÒ—R½r¸þÖ[o%1{G/i˜ óo²É&>Y²Yt\iågbñti*qCp"ÏÞ›lß׿þõ rW)³ ‰‰˜Ç˜1â–ó‡?üaª‹V<0ûÛߟþô§òéxÀ§—¬rúP¾ï`}ùc7,yS™%ÜÄÜÌ­7­@Yu! „€B@!Ð,ò7Ôÿƒü ¬¹æšM‰ùêW¿¶ÞzëŠ4ñ$ÀÉ'Ÿî¼óΊK\pÁBziE‚÷yä‘püñÇW\‚ ÁJ§#yì±Ç2ÅÒ-¤³¦èƵ7iz¶ÕËò«%¤ÑÄ $ ”ÓB­$¾ÿÒ†•ż5RŒ zzùdåxBÉε붩ÄM >‡ŸùÌgjÆ"&nÒ 2ↈáÞ݉› V­wïÞaÈ!YÐÏþóåsq»Ëªì`>wôÑG—SÄÄ] i“w©C=Ô’5u+⦩ðªp! „€B@! 
šŒ@â¯/¿þõ¯Ã7Þ˜œJ[LB×\—|Ìô¹ï|ç;e¯\xì’+È"\¯LÈ‹¥õ‘Ã?|!¯ÒØ7pøñî¿ÿþ$oµVâ&© Ê?ÚxÌ1Ç$.d{ï½wâZ¶È"‹,d!ƒw òä“O– «j7XXÁÄú2c±è¢‹.T>£CÏ›7/©'&IB÷oüøñ™®W.Y§ï6•¸Á쉉n’FNàæäM¥,­m™ÔfÉ’FÜàŸwñÅ'¾2˜MÁ¾qÎcÖÆ€@® Ä™aBxá2S«´g²üò—¿ ÷Ýw_â/ #ëo`ÊÚsÏ=Ë“&&n˜Ü¿øÅ/¾—ÔσûpÑÆ½öÚË7©iû"nš­ B@! „€B !nÒÜðì`1H¯;Òtt¾Ÿüä'I/¸NˆbИøö±Å a6°Ê1a¥dì19çœsÊ¡:¨ßë”` 4cÆŒ„¸!¯×§‰‹K’¦³¢7_tÑEá®»îJôRÊ&¤±sЏJQ>±dп)Oš8Vu`,a’78q5â&æСÑãY‹qÀ'v{"4‹Å«IÄժÑÃáÐùãü]eA¤¦7LJONÄæ] rGþ|˜¡Ùúï1qIsÈ!‡”É›4y¶qYÔa&oñ€3a`Qa,M`¹I½x¶.&nþõ¯%ñt,ý;ì´ÝŽÙÒ‚%5[DÜ4a•/„€B@! „@3(BÜÐŽK.¹¤üaŸã믿>!d6Ûl³pä‘Gr*È¿@'‡Î>ûì JÿýGw\ŸÎk‚µ Ä áTôdôe/„ ñ±S üëI Ö9à€’,±ÎJ›pJ[ø¦(qÃ"<è¶^bRå•W^ 3˜ÔKÜ@A¢x‰cÁBÞLJ yæ™gÊO1&iK’Çó€àÈ;í.M%nâÁ5S-JGÄgòb²¥Z€'«c@)6 +Nì ¦•c‚Gyæ’´Õˆ›4K!ò\PŠ’ícßk˵˜¸!ÞŽEÒæ:m ?^x`<ÿüóþTSöEÜ4V*„€B@! „@‹(BÜà‰Âî…Õˆ‰MÂ*R^ÞKnr’g—]vI t ‘‚wÖ"—_~yxî¹ç*Šdå'ðšàV„ËV^‰IŠ_|1q½JË_”¸ÁÚ«#/q?c‚«^â«$ORQwZ|\“|ä#)7ÍZŽ1I# N9å”!frÏ=÷$+^Ûq»n›JÜà÷æ—Ãô 3+/1C鯱ï—u‹‰˜1²4!h1fd«®ºjÚå…ÎU#n`K!˜b‰b‚‰‰›8øUZ7 tú'„€B@! „€¨Š@↥ªÍÅÈ 5V]òKeÛõ޶ħ,Á „eÈY†»šWõÄO,ÇS9âˆ#Ë3iÒ¤ÄbÆŽ;ÚÆ$…÷T‰ó%n ,>••F¬yEšz‰›M7Ý4q…²úðzÙc=ì°¼¥NB”˜xÂ+Æ$v[#Ol8"â¦Jä ðYvÌbÉì§÷ë;ýôÓ+Ü«Xí‰UŸ¼Ä 7ì©_:<)àý´ƒ4^ª7O<ñD²¬¸OÏ>m5ëŽýÄq"! „€B@! „@ã(BܰÍ®»îZÑ,7ˆU($ÂZ&ôVôH „‹[«!è–|p€|H *le‘ßÇõh—2òJ’ÂÊ*JÜÐÚã–ؘb…>!õ7n¸aøö·¿í«¬ˆ!kb )âí»ï¾Éå<˜ˆ¸1$Ý–FÇ{¬;SInT\(`%áâ…ü'H^â&t&Ó 7ÜÂŽ1ŠT¸h"Q,q0©sÏ=7à‡‡ˆ¸‰ÑÒ±B@! „€B 1ä%nXiø¤“NªøpïÝ|PúwÛm·r£²Âd”t°ƒå +Maåƒ÷‡-˜cÙÌ2f¿ýö Ÿþô§ítÈZyøðá›X™7x± yH +¼(qãWܲ2X=Ê[À°x}0©—¸Yf™e’Å€¬<¶F®ùs±ÇŽ'¼ò`"âÆ£éöÿøÇ?V°Ž(gžyf9’¶%eBCžÄ&f~¥¦¼Ä Lä‰ú͹´¨âÕˆòŒêšk®a7‘øFã$Q®ÍwQÄÍœô_! „€B@!Ðhò7Ä2aå#ïáA;üªQ«¯¾zB‚XûÐWYÉ;C¼T¬kzk ÛŒ;6YªÚÒ±ò“Y¡ ãžvÚiaÔ¨Qv9±Ò!ŒFšû˜ÄKãn…õ’‡¤°rŠ7¬ìLˆ³<¢ßèìm‚Þ‹þk7`€.oXXºØ€‚¸¸¬`®ñê^<òH€¨1A?묳"ËÎ=ôÐCá?øAr˜7†\´­_ì2+Lxó2–ÇÆ¬,øt÷Ýw'ì¨åÉKÜp#l³Í6–- …ï">ˆ¸6l)®«#↠‡ÿÁ¥ˆN›½»×=)⦠¿v„€B@! „€ E &n ZF‹@”`Á6\ƒŒ€˜ñá;b=“€·Çw\²Âä ±H}W…ОÐð¤V2§žzjÒêFn¹å–„Ìa?öà m¬štóÍ7‡6Ú(!OðJ1áü/ùËä0IaùŠ7äÃ’…úèÿV[m•?Xyl![ ]L¶Øb‹…‚#?ðÀI`ftþ×^{-IšEÜp="ÌËu×]Xh¹å–K‚91Â_NHÈ$&"n*à«< ˆplIS™bá#nØDÌØLâ*+8qšÿep#0ñÒ¤#â&-?wõÕWW,ç&âÆ££}! „€B@! 
„@㈉›¼%›…‡Oÿá8‰=ã?Ìs=K´©H X>È,Dpsò‚A«-#/@ Åõúô~ÿË_þr™ÉCRXÞZˆË›¶EG?äC*È/\ºð¬IˆpBª7,»Ž›VV¬Ú¸ìØÐ#&"nbÝ1L'þiX©äHØ6Ì̼ä%nÈÀ{“4_Ö1Ü„þ&ªFÜpc=:ó†J[æ\ÄG\ûB@! „€B@Æ!P”¸l8á„‚YgÄ-ÉK¢ £úª g0(€tèHüŠÉ–v‡vHH;NÛ¢¿OõÊ+¯,_ÎCRXâ¢Ä ¡F²Vg¦-X#™u“ÕÁ·°e—]ÖŸJöó7$^j©¥«¢4k)_p<\˃‰ˆbÆ>7,¡7÷òI™·ÞzkBº@¬ÄBàbŸ×“-qZXKn¨5ÖX£ÂÊ:ÌÕðÇã&3!`Œ’6à&LHL¿|Ú‹ëK€C6y‰‰ÏŽZºK.¹¤¢m¶¤œ]oÖÖVÂâ’! „€B@! ºgœqF¢ägµ]mÖ¬YI¬šk¯½6!=ÒtLŸwŸ8`¡°¤™^ |~I¥¬49ì°Ã–[nYa@:ÚALÚkqqâü+,s¯HE^,wÐ_‰÷â…¸.k®¹fùÔm·Ý–Hån'&‡Ð‰‰aƒ ßâžå…¶°jîaÞ ÎùýïÃ7Þè“—÷i?^3›l²I…ÕÌ‹/¼/-ŽÄ1pé€ÈŸ£Ž:ªÂÅÌ®ÄO9å;UÞæÁŽ€øB&„o!R»K¯’ ÓüV7Ó'ÀÂ¥‰ýçŸ>ófâĉ /jDÛpÓ"¦ å{_Ƭ²Óˆ›“O>9IÎÄf {–c…*ïÊ•U^»qÓn#¢ö! „€B@!Ð.`õ±ÒJ+%:ä¤I“’UŽYE)È]ý}ñ©§žÊ­ã¢§áé1hРDwÅò¥Ù’FÜì¾ûîIµ6à@{^~ùå²›VÞ6¡ëC–Õª3C­²Ê*¸66Ĭ-‚gÞvv…tBÜ´;0Õˆ›vo{žö‰¸Éƒ’Ò! „€B@! º7Õˆ›îÝó®Õ;7)ã%â&B@! „€B@n…€ˆ›®1œ"nRÆIÄM (:%„€B@! „€Ý 7]c8EܤŒÁ˜¨dBª«®ºÊ»üV®R]~Õ! „€B@! „@Ý,¶ØbIp^ BL@g–,—´"nÚk! „€B@! „€µ! â¦6ܺt.7]zøÔx! „€B@! „€èAˆ¸éAƒm]qcHh+„€B@! „€B ½qÓÞãÓ”Ö‰¸i ¬*T! „€B@! „@ÃqÓpHÛ¿À®JÜ ³]è·ü†aÑcBŸK–€îÕþ`÷ˆÎs§½f½þd˜ñüÝá½''4¥×Ë-·\9rdXb‰%B¿~ýB¯^ÿ¦­B†ÀüùóÃŒ3ÂÛo¿&Mš^xá…†•­‚„€B@! z"nzÎX—{ÚÕˆ›þ+n†l|p‰¬^îƒvÚ¹ÓÞïÜyV˜þìm i$„ÍØ±cCÿþýRž …ÀôéÓÃOZ>ÖŽB@! „€¨†€ˆ›jètÓk]…¸ñ¤Íœ×ÿ±Z§Žˆˆ›|ð÷]yÛÐoý“Äõ7ž´y¬×¸0s©ò5 I©4þM¶»Â +” ‘7=hàÕU! „€B@Ô‰€ˆ›:ìŠÙ»qƒ{Ô°mIàûÖ3¡ÏЕ:j)îù‡ Ï‡Ö ¶8:É0ùú »Má5nܸ$ÿ#ý¶³‡,°âÉß‚Æ§Ôø7ÓžX¢'o|ðA¹MõÄI > ! „€B  "n Ö’wâæC{Ÿ›Ä´iKs)î†D¾­YÞóæÕ‹È—éýTÛm·]Ó¦,m¬áCBÛz0ò†˜7&4'˜w½mT~! „€B@öA@ÄMûŒEËZÒîÄ «G Ýâˆ@L›ÞK,ß2\:ªHŠ{G-|}À6Ç'1oÞºåÔÜ«M™µÍ´¾#ÃÃ>µp¡tFãßIÀwÓj±(#ö¬nºé«[B@! „€h "nfW)ªÝ‰›aÛúÞ$µ¶™ùÊ#a±Qk7m¤¸‡Ö¬n¦O¼=Lžð“\l°ÁaÔ¨Q!¯µ ãž&ž ÿ4”u®VÌêæ•W^ ÷Þ{o­Å(ŸB@! „@@@ÄM下íNܘ›Ô¼i¯…Þ—Š›¿ÐñÔûÿ¦”þL¯·OTúk´Hq/Ž(«M Üé”PÄ]ÊܤîT²¶02³R›7®Z)+Q#ç‚Æ? e¯V›ZwÝuƒÜ¥jAOy„€B@!гqÓ³Æ;ém»7ËtE©½Âüys;\òÙH¬+ kf•”yHއï|BCGWŠ{mpÚ Rm~xéìOä*`—]v ½zõ ÷8(sümÜ)ÐÆÞnóÀ®7b.hü=ÂÚo[n¹e˜?~¸òÊ+QœÊB@! „€è¦ˆ¸é¦[­[õ7Ä é·ü†aÑcJ„—Lªš;íÍ0ëõ'ÃŒçïÎË$«Ë”_‰1Þç1K ”õFºËtuÅýˆ#Žk¯µVxgÊ”ðË_þ2Lœ81kz~qJÄÍ.¹ÊýÄ'<÷<43½•åÇ=-±ÍFy]}üÓðѹÎEâ¹â ÈêÖÈ—¿üå°ÚرåÊ.úóŸÃwÞY>îÎ;ßúַ²Ë,SîâøãŽ S§N-·bç„Nú÷/WÕm(WÞÉ;;ì°CØéã/·âòÒ}pýõ×—Ûm§ï>r :4Lž<¹Ýàê2íéÝ»w6lXxã7ºL›ÕP! 
z&BÜô^lP‚ö¼™­}aê™C¼p¯k%nX¢{ÈÆ'«=-\êgp‹yçγ /m%t¤Œ[:¶æ*ã-*LYO#n u¸n„N—ª®¬¸øÃ_ÿú×ËÐAÚ|ÿûß/7s§ÑÄsæes€>¤Yߨ|¨—¼iÕøï¶Ûn«£¹s熃>¸™CSQvŸ>}×õÖ[/,µÔRáÉ'Ÿ ÷ÜsOxî¹ç*ÒuÖÁ*«¬_|ñ0¥D<>óÌ355£Þ2ˆK³Í6Û$1˜pq¢W]uU˜3gNMíé âæŒ3ÎHÆ×ü׿þ5\tÑEvØm·ûØÇ—¾ô¥rÿ^{íµ€"Þjo”m“¯|å+áÕW_µÃµ9rd8ýôÓËxp}á _ï½÷^[âÐ.÷ÍwÝõa…F'ΙOü^¼õÖ[É™ßþö·É~^É?f̘°VéÃÛe—]6Ì›7/)ãÍ7ß 7Þxcxøá‡ó—¤>|x8òÈ#ÀÊùø=ùÓŸ>pk/_ȱÃïŸh› DËüc;,´ÅU•çXöíÛ·œwÆŒIÿøÇ?†Ù³g—ÏkG!д„¸éµÈbaàê;•¬46 ‹Ž[rXðœ?ov˜5鉒•Æ]aÚ®óçÌlLº}j!no°4n¯BØL}ð’0åÞó å!qâÆ”xOÒ˜¢—cç©%ž¼E”ùV)Ѳ馛†ƒ:¨\,ŠÂ1ÇTSN\çN£‰Gs³¾¡™Yãiy²®çéb«ÆŸ}ö (™¸Ð|ñ‹_ÌÓ´ºÓ|ä#I”X¯PZ¡Ì•ï~÷»ö»è¢‹†ÿû¿ÿK Ú„R‡²[DQÄ'h,(›(t÷ß|©Ãc7BÔýúõ çž{n…‚öÍo~3<ûì³ )¿H!"n*ÑúÚ×¾¶Új«òÉGy$üñåãvÚiâf×]w Ÿÿüç«Âá†ÿý﫦ãyÏÇí·ß¾Lžee˜9sf˜0aBøý¤|~çwNÚÙâåÿû_8úè£ý©\û«­¶Zÿï„þŽ"#mâ÷²¨¬³Î:ÉoZÚï•søá‡Ë ÇÑV¶@ éÄÍÀU· C6=,@ÞTH›wnûM˜ö¿ö5“­Öþ®t­(qS icxÔBÞÄ„‹••µÅâÂH¶ˆYcø<(øþ¼)óFø´iû­RÜÓênÄ9Lô{M¾r² q+¤ˆúiã]+yÓªño5qƒÒ´ÿþû—_ܱhÁš„/¦öâ͹o|ãÉWÝVÌ«ƒ¯¿Ô»Øbü~%nQ–n¸aÒ,µI“&ȾÚ"œûÉO~žzê©ä8ï?7y‘ª/$5«Õ™<ÿüó‰5€·r+â¦íþ%·±óÏ?¿üüáê÷¾÷½ðè£V&lƒ£Î&n ò!EòϤãJ®€ÿùÏR“¯¾úêáÛßþv4h¼%Âr‡ç=¤c㟽¤Á%‹rӬİ®aì°lL“Zˆ¬bø‘&µ7+®¸bøÙÏ~V1ßÒÊæåcÆV"„€hšJÜ,ñуÃÀ5?Y¨ŸÓûGxû޳ åQâb!np¶ml™1?L}øÒ0ýé[Âì7ŸN*ï»äÊ¡ÿÊ[„AëìV:þÀ œ‹“¯?¡ÛTQâ†:¼Õ…'g¸†˜eN|ÍÜl o:’V)îµ£žë¸š¼ûî»õQ8o³‰C)SÍý­ò¦Uãßjâ³z^ÎyaÿÑ~”˜Úƒå"‹,’#FŒà0üîw¿ wÜqG²ßŠŸûÜç·$«‹öA$!nQÆÒK/]6Çç—…¾øâR€GqS­ÚÒ¢€^|ñÅe’R˜ó×]w]mÖ™KÄÍÂþâ¿(¹ý¬P¾ðØc%åm²Ó™Ä $ñÙgŸ]A8ð<ºå–[Â믿6Þxã°ÒJ+U …›+Äw,õX:™ðl½ôÒK‹š8Î ®IŸùÌgÂG?úÑrÝÔ ¡Ác²òÊ+'ÏIïvd×l[„¸¡ð&~O–Ð{þf¥‰ÏÿêW¿ <Ó½ÐgˆB,Oè/…[o½5œzê©çt „€è,šFÜ ÙèÀ°øÚŸNí×Ü ö}ú N½þ•b¤œ“zM'ëG qcKs[­sÞy)L¾á§eÂÆÎÛgØ6G…E†|²ÈRДÓLâ&¶®‰­p¬iÛ"ŠûrË-W~áÀWܾ®½öÚÉËŠß]wÝ^~ùårUË/¿| ¾È’K.0઺§@Â`BŒEŠ7/E¼ðšRY.¸´ƒ¾Æk”OñµŒ/F´×äé§ŸÓ¦MK¬s6Ûl³äE†sø§ó¢TTšMÜmO­äM‘ñÏj_0±daÜyYN“F7Œ)–U^…”ùéOš4åöŸÿügE“˜c¼è"ÌáŸÿüç×›uàÉÌþ ¦ò€‘—¸iDôï¨£Ž |FÎ;ï¼$þArðþ¿“N:©3«6bå•F7´ EŒ±ä>Çê‡gÏž=±d)Ÿ(ÎÛn»m¢üчÛn»-pßWˆ´wÜ1Œ-;æ¹5kÖ¬ä¹rÓM7e~éç™…’gòøã'.KÔÆýAxóÍ7[’dKðP—/S .̼äË?ϼ«¯¾ºês…– Ä&ÜwÌ%,L¨s£6²ÃÄ¢êÞ{ïMž`²êª«¬t.¼ðÂrvP.‰G……÷÷ÛK/½.¿üòðöÛoW¤µƒ¼ÄM-e[üð[ã±fL¹‡Á;ëÙC~ê%/ñN'žùäå7‰­ÇÍêóÛ-¶Ø"q)d>ò â·’¬Ú\ãÃ;¬\ ÷üg?ûÙòq#v²Æ+‘í¶Û.é+$îs×\sMÀU&–¬{ÇÒ <8l¾ùæv˜lzè¡ðâ‹/–ÏïN;íF]¾_ 
ÈÌ~ÿGUNËo®ýNï½÷Þa=ö(_#È?Ï~ðƒ„5×\³"MŒ#ýýÃþÔMBæëñ%·ªŽ‚c©òÃRùæ®Ùq衇–ëúä'?™XnÚ æ sËD“"Ä –@ç–ܽ¼óÎ;IL;W”¸aNDZýx³ß@Êǘ~ì»ï¾5½ûX;µB@4 ¦7ýWÚ"QÞ}#çÍ|·d¥ñ×0ý©›Š<ÒgàðЕ­JV{„Þ‹-î“'äÀôgn©8§ƒÆ —¸aõ¨¡[á*^»ìˆLÒÆBÞ,õi¾P|`yóÖ-§æ^mªÄ m3—*¬n-Yf Àc‰“9Öx[Dq?ùä“EÆÊ?~|òr„bå…¶ .¸ ‰áÁK…^PšQ¼ðRŽïµ)“þû|ù?¾ô"æ-kP¬¼o9/à(™±oˆcBð@(³´°ó¼(Ò^à‹H£‰³žÊ;nim­…¼)2þÔy_G{÷.­Úv{íµWÅK,/¾¼$Æv«7ÌÆ…%‘`¦Hñ¿ß~û%Š·½,£8¡„0Þöåõ;¥X¯¼òJ ãL»âÈø/¨ÉÉÒ?,mP6øª ‰QT0KG©G1ȲHáÅÅÊÒéÂcˆ\·~øÃÖDÜÔS}=í´Ó Ø€},|åÆu!P1Šy^©‡¸A±9å”SÊq\'ó‚ë0^bÅ„ö®»îº 9áÓ±ŸõÅžkiFq4w:Îy@fÎÅs «.ÿ삤aî¢Èš <û¹Â¼óÄŠ¥c Í 7ÜàO—÷‰ÏDÿLP¢ãqŒ•NÚþïÿ;!Š,_L&ಂEVœ ,!˜;±ä!nj-›ºêø·ÆÚ²Ë3 -¾D=–YyK~ËPöcA±'P,¿OiBž1à Ïž“^ø Kû­á9Á3’yÇon‹y$mŒÿò—¿„ð›õÆSaò„Ÿ„¹ï.øÂÃÞgñ¥Â°íŽ ‹_µ|‰§^¹p¿æ¥•.'ÔNaò7ö;6ôýÁÜÔ‡ÿ¦Üý‡\õ Þð %Bn÷rÚéoOæ@ùD•f7TiäU_Dù/¢¸ÇÄ V~u«Ÿ->è^‘ñ×xñâEÙ,sP~ó›ßT>½íó奅)BÜð¥Ñ+϶Äû`›WšEÜÔ«ÆÚ]”¼)2þÔÁWM„`¿Fròýàˆ5€ÿjšEÜ@Ú 3xaåÅ2Á9—¥4½ð e«*b`=GPà n¨%Š—ç¢‚b„BˆÐרR‡¶#·ß~{8묳’þðåÒɤ(q&õ–AÝp½KJĆJÚ VE1ª•¸áþä90dÈ´æTœóÊbå¥%Žsá Ø{Aù±¹èÏÇû(ú(ªöâºW9æ–h^Þø@ÆWç}¡}¯<¹ÿÀÚ[ûL/½³\]zþ2/#]°0ƒì@ ÍÌR |‹ýë_å9ÏÇ¡z‰>:`IÄÇs´(q9fqÉh?ï^†Ç&üvCÖ›`©Æï¦D!ÐÙ4œ¸°ÚŽaèf_-÷kÞœáõK¿æLYð…·|!ÚYdð¨0b·_…Þ‹|`VùÖ¿Þûo¥é~”M‡5 —¸‰Ý¤^»ìð­m¬9 ¬n>øÚXÄ]ª™Ä)ë´³¨â_Dq‰êã«/f±’Â5„k¼Lù7Îã¾Â âcjpŒâŽ"ÇK—)Da±8EˆÊE²|᯸⊉rüo4qC•FÀ!ÞÒšjó!Ï\(2þÔeÄ ûŒ+ÖU(|QÝa‡Ê&í6| 4·–4âbe‰ñe\Ž=öØ —8?ßp•ðàk*„ fù¬ªdR„¸ñAyÏ,Y5ÜY2+/*ô×0–•EAó‚•®–FFù4ì%nâüµ”á]ŘóÌý41«Üø2žWj%nø²ï¿ÌSsl$H´ò çq³0K‡Xù´¶’Î??üyÿÕëHÃ8-ŠóÌ”jËÏ×w,@l~ÇÄ¥ó[#n¸Lq´ëÔÎ(±ž( ýX’<ñÄ–4Ùþío«8f.ÆO¬ÔWdxÿÀˆÔãK_ä}ÿy¿Z²b[¢ä.Åœñ‚bï­°ªYÜÔ[6V+~sPîQBc2B…Øòw4Üeï»ï¾„”ÁUÌ+ç Æ@ú1§üïcÁÑ_&éž_±P¿S&/–ÈæÃK¼¤­¨äÉ Ÿ6ÞÏ3ÆqæÄ3ß;FÜ@¾aéä"ËÜýÀžÕ˜âßvÒgÝw\ËÛ7Òòaˆ:üøÇ„¤™=s°´¢°œñ¤å!¾mX†Úoó̬²ìþ„Xb@ð˜ÔCÜ0ß ˆ‰1cR/qžÖnÊL#q9Ç\b.ó[(B@t6 'n†m\è_ZöÛdÊ…©÷]h‡U·ƒÖß7 þðÞå4ÓKË„OþWå—¾òEíÔŒ@^âf™ƒPÎ?pwò€óT^IÀÌ/þDžlM‰qc›ÂÏq¨ØÒdm‹(î^‘¦<”Q:^„ˆÓ€I´Ü<ˆÄËOœ8±ì—ÍU3ûç¥%ÄÝüã_ÛX ”/|Hâ†6òBO½˜}£èû/û¸nñRšWšAܘ»mhySdüi—'nÎ=÷ÜòK<×”"„g[N:&nÞî‡4w#?2¿Pð!°²¤3ˆ¾Ps?"(mwß}wjóÀŸq@Áã+v^1%ªJŒ—øþC‘FIâÞECñð  Š+Ÿœƒèc¾òÛ€‚[âfi Y¬äQ'_­yÞ0ǰDñî”ÕÖ'Hq)ÁA¼ ÚŽuŠVa@]XaíabVvŒ5u{Á* H/iJ=ý\ž°,Àu+‡¸ÿ¯Œ¹cñòÖ±U@5⦞²P³~„ûò‰É,*ö+aÄñDè»'û ”¼e®d¦È²œ4qŽLø]b˜«MšQleDÞC9$!´­êÀ× ¿cFÙyž 
yVHÌcžÍ|àÀåמØEwCÜæøÞa,y¦±Ú“ÜŸ/»ì²ò)ô#<²|ÌøpÏòlåYËûŸë¤ÉKÜô.¹äB{ò…1Sž«&ž`ܸÆGÒ™0o&N|6¬³Îºåß ®qÃ>Ïs\ã,n²ÜÕê!n’£õ7ñ½“UL¼!ÆØ¬Œìœ¶B@Î@ áÄ͇ö9/ôðÁW›×þö•0û­çrõ­ïÐÂR»`Š>÷½ÉáÕ?íŸ+¯åG §7f]aHµ’¸áåïá‡NªFáÀd×i^°x‰â…=zt…Y.ÄLžØ"|Iâ«© /Sצq3˜_ûó4rÀêLÛ6ƒ¸¡žf7ÕH Z‰›¬ñÃ:Ë Äb±ï‰ÌïCæ fï'>vé=˜æŠD’qãÆ±›Ì­Ž\¥˜ƒÄ¡^æ%$_½ø/ìþ¼í›ÂÈqV½›Ôßÿþ÷ðüò/´í âfûí·OXƒÂhäEÜ8îOÈîåX‰‹ÓúãZˆ›XÅ 2Â,Z(‡ûÖ„€´FèÅÊ'ñŽŒ #=#üã-k²ÅzË•8 ¤ʯ sÈ»_y¥/&n˜_(± ]¬ìšÅ‹ÕÃbÙoŽ)Ë»Š¨ÕHM®#f!°àhÁÿSÎz…ÝÒÒ&{vsž{ËH K‡;·páþ1K Xy4£e[ýi[¬n<™Ì\5Kª´1癄u\ü¼‰ËŽÝO°J£ÿ^ˆÅB d¬Hýoçc¥œyMPØX ž‰C‚€)JvIcúçŸ9ÌYÚÅX˜àúcÏéøÞ0‚0óó!Í=ŽùmV…” ™`–<œƒ(ˆû’‡¸¡n‚ÆûU¹(Ø8ä÷QÁsÛß'>~÷®U´+ž/þöñvÒ,جÎv"nÒž]YmÇBg¿IL¾Úym…€­F áÄM¥•Eå2Íy:WoþÇy\cH祈ân–Ÿ/²þ%—C3ÙåeÉñâÅŠ/É&±âÏW9LÆQù†‚À7ÿòHÞZ‰6²R‹ JÅÏžy6vž¸!p8AŒ~OpÉJ“v"nh_ì6éïKßþ8ˆ1–®X;K„€@É›¥¼¼ôr޻ܯ—ÎÙµä(»ð’¤å~§”o™R~“ù¥|/“_ÒPò7]581¤ ≚Àj–qZ;.¢¸ÇÄ ÄŒá­•¸aìx1Œã(Xý¶V↯r|m5A!õ«W´qCë%oò6ÔSdüIoÄMµ•†lžx"OÜPŽ q9ì˯cK¼Ìü‰³åVš`½`/êÕˆ¾8³Â“)F¼Ø¢°ÅÂÒòX÷TOÜÎÚ‰¢Kàl«‚EÓ$¶ŒH¸AÜx«(kÊ5+ÙÅÅ…{#Mq‹ÓÛq-Ä sÀÇ·á^õD¯•µõŠi ,ÀªåÈñJ¸‰Ý^²”_È“=÷ÜÓŠKHkæ57iAhIç•CŽóŠ·€‰­8xÇË#Sn¬Ô{ÅÖ×÷ß_«¶Û ÏU$‹¸iDÙË/¿|¢Hû¬ÕÚå‰V,‚ ˆ?øüŒ·¹åÙùX¶óÕ¶ÃȧÙ`ƒ rÇôíóikÙÇrÉîµ2cÂЧ‹ïËcÛ¬X(X¤y 4\³X©(¬ÇüªTÕˆƉßOQîœæ"íË÷'ÞÊÎ?OVm–yäõýõÄÄŒ…$[š´q“Œ>Îo¿ïç±Ä G"„€èlNÜŒüì9a‘ÅG–ûõúåÿf½öDù¸Ú΢K #výy9Éœw'…I^ðU |R;u#—¸éªËwP-¤ eQÜM!·¶4ЏáëšWÚ,81¾ê¼`nµÕVVeÍ7]¸¡Ãµ’7yIê(2þ¤7âæþRü‘_E \Gxy'`¥w?óÄ EùZnD VXHxñéùbÍ×ÁX˜þ iš«í`Þ)ˆ2Ïö4á {ü•×§Ãm‡}/>fJ9iì«0ÄÃ=÷Üã“/´ßÄ 0’+âñ¤ ˜§'¯4‚¸a%.¾è篌‘§qƒr_† ñnZ”+¿Þí%&n ™w±@î±*˜—4Ü!¬ Ä$"nR«ÅMZ|ÊKëV›Ho×PÀàÈ"nê-;ϪE1)#¸–Af@Çi鉷̈ûãÇÂÒ›EŽá_lµYÄâÆÊ-²‰›,÷b÷`hâçB|ïX¿%.’wßãš-Ãq4˜sH\:‹¸Áz†{&vbå;È’,±Ö<å-ø¼eu@tØ<ðd›•“…¡ÕßnÄM옅¿Ë>Ö‘ï»õM[! 
„@g ÐpâfØ6G…þ+mQîË»_Þ¹}A‡òÉŒ!›_ãå«ÓŸ¹%L¾á§åcí4¼Ä µÅîRsÞy)“Ùo>ÚV“b,2d™òõ"+J‘)v—+Te§£ÀÉf…iS«QÜ›AÜxwú€2d"9&Э_­£§XÜÐw¤(yS„´¡ü"ãOz#nü8pÞeÆ^ðoºé¦r\OÄœsäÈ‘‰•/Ð(EX¦Lœ8ÑŠ Þú… šÄœðñƒbo+š¤YÜоÒsAɼòÊ+}1 ÙÇ‚‡yŒ•Š6«¤aá]³*ê,âÆ¬‚PRpƒ2‚ÀÚ‰Âm÷aÑ×j!nx¶xÒ Ò/^š¥ Ń9Ã>$7V>‹7i‘}àbįö‡„sËf –—¸ÁÎq¯å«w­1n¼²n}b÷ŸûÑâÄøtÕöc¢ÃbÜÔ[6Áo±¬3a¾rs¯a ‚õ ·—˜¸±k<+˜×Ä]a½iè·/ŽÉÜ,±Ê)*´Å¬²Èëɾ¢e¥¥‰Òx·:ËÃLj+ïÖß;¤ #7,dˆw/eŒýªSŒ ™·ÂM›«iÄ ÷3Ï$Vqò VqÐr}®3Ïh31° Ò>ýéOVö3Ì€€æ¼'/¬ÍÜ—Ä,Câ`àV†mÛ¸anú•˦H¬|!nPfP&Í‚‚nÅîÄÆ1+%붯?/qC|!ÒšÐOˆLH(/o¼q¿…¶amæ—úf)jÚâ%ϪRYÄMZÿy&SÄ xq15å•©LIÏ"nê-;+ÏKÉ@IDATý¬:¯ÒdÄ ÷!«¢™0—øý0!°9®Ž^xŽ`9»Õ¥¹¾A>CLÌ?_)ò–ŽL²Æ€ëXçð¼›0aBò±<Õ¶iÄŸ—äeÎñŒ°vrÎ+íñ½9î=>Ïf\EmÌã@Û”kÉòì"¶¿ã^â1ä¾Ãê¹å%î¯ù}»…ä<뉩wV‰PO‹oÄ=çûåË2ÂögI^â܉+Ñh¿kieÆä¿qžì³<¼a5E<5³¾ãÚÎ;ï\þMµ´~Å=Îá.¸ÑFÙådËóÕßu „€h! 'nhûÈ=KJ„·¸˜þVxëÆ“ÃÌ—JíÚbK¯†nýÍЧÿÐòu,;&ýåÐò±v‡@â†Zo°4n¯š0õÁK”{Ï+”·(qcK|CΠ¸#ì×c]“Öà"Š{3ˆÚd®5Ö>¾˜¡Èðâí•)®÷Dâ†~wDÞÔBÚPn‘ñ'½'n'6¾Š2V;í´Sùë8Dж½ä§7”‡"@ÀQ$^ž›/Ä|åõ/Ù>°1_¯ÍÝ*&n°˜°¯×ô©“ÊÞÿ‡[žÿŠì¯åÝã%‘Õ«~Ú‘T#]Ì*†àÆX¿d‘7µ”ÁX‚Kƒ-˜žRRœPX9†8Å2Á 嫈ÔBÜÄ–ÔÇ3¥òÉ'ŸLVCÁ’ÀÏbTÙró±òY„¸¡®ø«5çˆÙAàS”0ñ_깎•J2’—¸!mìÚáðÒK/%Ï;Ls$}š"g.J\G°>ýÌK¬ÔW# â’¸·‰‰aøb€5…Çò”6#YÄ ×ê);W‹·„bÍÅ=1þ0â†8%~2úä—ôF©ös…Þ,nb‹ ú‹ Ög<ÓÈ‹%¨Yüq+ ”Ë1Ï«KÏ„Ãß…µ Å‚ÿ¸‹QÊxc-’u¿û|ñÛ5È^Ü4y6A°n×™¯¬´ˆÄÛ½CÀÁ‹'4é;OÚJ|`‹•ã‰Òðò±§HÆ>&åµ-ñ•lÅ6\-¾÷qÏK¬w ãv€5$¤=ã¬Lò0W ;oó7>4åB~㊗&yˆ›x>rÿ›¥Äñ=A~ÌEža<7 äïï]~kÓ>¤µOç„€ÍF )ÄM¿6K~lüBmŸöäuaú3·†Ùo.ˆ}ÐwÉ•Bÿ7Ç~l¡´o^÷£0ã¹;:¯õ#P”¸¡ÆZÈ›ZHê*Bܤ)áFä)‡z;’"Š{³ˆV6àkcšðâ_Hz*q6YäMÚ|IÃ2í\‘ñ'¿7(±«•0¾¤ˆ,Ò$‹¸áºìßu×].7.}%Ü·¤@XŒÒ Äš!|zâ†<‡–í¼Λϧ㋱­Tâƒ2û4iûÖ÷ØâbÅ» ˜g­rRkm>Ø.÷ZŸ’¥“­øaÆX‘ZˆÊ'–QÖs ®?&3²”OŸ/¶¢ðVÌe®èó¥íÇÁR‹7¬ž…"ç)êÈRv}`k‹Åã°c¾ÂŸ~úév˜lc¥¾qñJþ¸ÿ(HÜÖ8>F5⦞²ãØ,IcJÿhWÜ&»fÄ ¿ñ2ç¤Á‚ ÂÖîWË»çqÏv«&‘Žz!Tb2€{wÆÐ‹'d8Ÿ»i«R 7V1ó[Løk~?c-k?^ñ¬Ú½Cl«Ø†yÂ|C²È«¼xŽx‚Ë7¬$™LÙògmc‹0Ès³ò÷&sâ r‡ùO,5#¨Á«OžµìÌÜ;’Žˆ~«Î=÷ÜŠùé ¯¸ü<Ä_ ‹üñŠPEç8¬,&B@´M!nè.*,­[‹L)-ÝŒ‚%iµ7´·©!ú ^µaÄ´yçγ ¹Gù‹.Ì”to]cJ{­Aˆ}[ü~Å•yüWgÜ\x6ñ®¼Èú¼¸ù'0)÷Aù:Jlÿe ¶¼8ú!¼UFÿË [y‚¯n>6B—ù ˜‰¤kçªm›¹xµzm†¹0«4Ox¶T[ò»ZyEÆŸrŒ¸ùûßÿžj;î¸c…à %óÀ,¬n¾|“–ys•²kÌ ˆSQ$ÌÅÊÒð2Œóo–“gif[¦Ú[¸ð’×aå¤mAÜ@€@„ i®Giõr« 
,:ÀÍ/Ì5#d:"‚ê)ƒ6s%VD¹÷PŽŠ¸HÑf¤V↼ÇsLâvÆ~–ðlÁB«“jʧ¥©FÜWÈ2O[^¿eþAz)BÜÕ–°¬É"¬ì´º¸öÝïÖ]wœ% ±2Î…X™«FÜ>oÿ§NšX añfR¸©§lÚijÁ+þV'[âyĤ®7\'VOl©ÅùXè ¿3~Ná®Éï½[Äyì˜g¿e±"Ì<Â Ç Ï)âËx<ÀÝÇ Í«YœXÚxŒ±8!îOš y <ùmô®;ÕîM\Výór 4#9°\Áâ%#îS~ ÆÑÄ7qû-MGÛ˜¸Ám–vÚýDLÞWb"­£ró\áw ×lk eb¹†5Ošä!n╹¸8à€Šâ¼ËXÅ…èR 74‰B ]hqC­·wIIÚ·P_§Üa‰´¹¨P%.†€½\yßß"%°ÚT¿å7 ‹ŽS"q3;íÍ0ëõ'ÃŒçïï=9¡Hq ¥-BܤYטUE‘rjDʉ¢Š{J =1D Lðy)oWé,â<õÔS5CcãV«r€õAš{JV\&qL¬ì7€†{åÐ:[àö‚Û’¾Úó5×­X ‰u‚ [,gÄ;1!> Êe5ÁEå+¶ü Ïô’«áù%åïÚk¯M-eÙß`AáïÜ›¼rçƒ)§Z:Iü(ÜÒúOÄ_Á=&VˆcâK™8ÞK­eó{€ÒËoƒW†±ºƒ¨Ga7áYVfe*,Ç !|~ò0¦Ìw\š 5b|Á: ר8/uAÂ`ía$†Ïχ‹×Æyˆ Æ(M 8è'Q›—LމÈ9\¸˜±µã™µ”—Žîx3òÆÖIœcN‚5„$®ZÌß7Òa-f®~ñåz!þŽûDžØ2«L>2x¢*OùÕÒÄîo~52ËþæŠǸ[ڸݱ5!éˆåáÆdÞáe¿¿V[,[±bJ#ŸùáÞ%6•D!ÐN4•¸¡£ý–Û þÈþ¡ï°•ªö{öäg”{Î 3^¸·j:]¬{i¯•¸©¿ÕKXæ ¾p”~tçÍ ½z÷©šØs³îZ´Û2§=«Â¢Š{V9=íüâf~xéìVŒ«†Á.»ì’¼tÝ7â Ç¿Z9þds£ž ÕEÇ?¸ñmjÔ¾NÌ‹/ʺÅÓàk2fîX¹ð"ë­°U¿Ê©ˆ”‹zWðB „Ha.<þøã- ¤ ÙµÊ*«$dÊ=-Yô«ÑéI<'\AQó(šÄ\ñ–X4Ó¢‚ûý>|xb ¹‘FNÔRW­es¯3&±Êd,Òˆ–ŽÚ³âŠ+&ýÂ5÷H’<‚" ¸ 1 &˜ÕâÎ/míWrJËÇJWôñá‡N»œz.¸±`ÍÖf¬X!¾±Òh¤`ñJ`\°Ä¥Ç cÍoý2l‹-*íZ½[ÈPÈ2oýÃ=;£XùÝ‘øs{çJZàzÛ`ù™+<à q=¡j׋n X‚]µ±ä™€Ï‚ª ˆû·£Ø=EÛ£ôB@F!ÐtâÆºØr ýKVWÿ¸J¶ÓþsM˜^²Ò˜ùÂ=çuÐ<Ú¸±%ÈçM{-ôøÙp"fac×›˜˜²‹*îÖžž¼í½øÈ0p§SB‘%áY ÂáþAŸ aÀȶ¯èø·Š¸ VÊñËÍ¢0aâ]#x)Gam–"Ð6ÕEB<“u×]7!°Î4¬Hp4Áâ ÷+Iç#À³«OX¤¹IÕÛÒjÄM½ew”Â7+/ŽX¹@òá² `A—I Ïs½ÒÍ×SmÂŠÕ }€ï´ôÕ, ÒÒëœB@4–7ÖüØ}女w±KÚ¶v'n†mwlè?z“0çõÿ†EF¬–¬o걨訢¢Š{Gåõ„ë}WÞ6ô[¿'`âíaò„Ÿäê2ËE³rÆc½Æ…™KU.Ë™«€&%*:þ­$nè2Š ^2( q0™ÇeBÒ`a0ºÐ—&âLHš‡–,ìÝ"psÊrÉh^KTrŒî~XšT NkijÙv&qC{Y°+¦¼’h;oÞ¢é°öÁЉ¤x&ñû‹5 „ +>ø`‹¦h¹J/„€E@ÄMcñ쥵;qC ¡[æ½ý|è½ÄòmƒiQŽmÞ‰ °Íñ¡Ïð1á­[NÍûˆ80ãÆ “f /-¿_'¶¾²ê¢ã…‰9qgâ8#•%7î LóyDzwèJÚ ‹oƒR”g9ôöj}×kMG…8&Ä—‘t#GŽLVè2²KAbÖà¦Óhélâ†ç11€x6W,m®ø*ÕPÒ5! „@ÏD@ÄM÷v'ns—*juÓÌá,ª¸7³-]¡l³¶)â&eý2w©v²ºÑøÛèh[/fmC|¹IÕ‹fþü>µ`¸Ž?¾j Œü%+e-H–?¬LX†ºÂÒò>1±w–Üja•´í·ß>‰D\„`¸Ä%£MÄ»ÑÞêQQ}B@®€ˆ›®1N meW nXz|ضÇ$ýžûÖ3¡ÏÐêÁ­ PFaRÜ3€I9ÝçCë„[\™|ý …—†7« x¤ßÖaö1)µ´ö”Æ¿µxw×ÚŒ´¡²¶é®£¬~ ! „€B ±ˆ¸i,ž]¢´®@Üäà öƒÆí•`Ú–7RÜóMo³´!õÔ/ Sî=/_Æ(ËÊZL€v°¼ÑøG¤ÃÂxÒ†ø*>úhá2”A! „€B ç! â¦çyè*Ä CãÉbÞÌ›ý^è=`XèÕɆ-w HqÏFŠÕ£úŒ\+ô]aó$¦ )ë!m¬&OÞLë;2Lœ3*Ì\|t˜ßo¸Æß@Ò¶­`õ(–Ô:th²¥±"mÚzÈÔ8! 
„€B@´"nÚnHšß ®DÜ€nSC6>8ô8¼ùਆº ¦Í;wžUØ=*«bܦƎ›,ž•Fç…@W@€˜6,¬`Ä]a´ÔF! „€B@´"nÚg,ZÖ’®FÜ0¬6Õoù â#Æ”Hœ%K§{Ù%m;ùaî´7ìן 3ž¿;÷êQE› Ã*$X0ôë×/uÙë¢e*½h&Àe)vV›4i’›f‚­²…€B@!ÐqÓ7«k]•¸ÉêÎ ! „€B@! „€è®ˆ¸é®#[¥_"nª€£KB@! „€B@! Ú7m4­jŠˆ›V!­z„€B@! „€B@Ô‡€ˆ›úðë’¹EÜtÉaS£…€B@! „€B " ⦺ˆ›8èê²B@! „€B@tIDÜtÉa«¯Ñ"nêÃO¹…€B@! „€B@´ 7­BºêqÓFƒ¡¦! „€B@! „€¨‚€ˆ›*àt×K"nºëȪ_B@! „€B@!ÐÝqÓÝF4Gê%n–[n¹0räȰÄK„~ýú…^½zå¨UI„@ë˜?~˜1cFxûí·Ã¤I“ /¼ÐºÊU“B@! „€B ˆ¸i ˜]¥¨Z‰›±cdžþýûw•®ªB A`úôéá‰'ž£ù „€B@! „@—C@ÄM—²ú\ q³ÖZk…W\1©|ÒÌá~«†™‹óû ½z÷©¿QJ˜zÉ>R+iOE‹°!C†„Ñ£G—!xöÙgã>Z>ÖŽB@! „€B ÝqÓî#Ô„ö%n;ôéÓ'Q„Î<óÌôÐCÃ"‹,’$½à‚ Âõ×_ßQ¶Šë"n*àhØÁÉ'ŸV\qÅ„pü¾аr;*¨'7`sâ‰'&„æç?ÿùÄ °#ÌZy½3‰›Áƒ‡þàaÙå–+wùŽ;îÌÕŽ¢çË_þr4hPBF¦¥Çå /žC<ð@Z’ŠsÿøÇÃXQÞî»ï^‘¦£ƒþýû'–KØiB›è_-$^#Ú—Ö&B@! êC åÄ »—¬•ˆ|í7Î$nèI½äMw%np/Áj€¯°&EÜ4¢-(<ö…EbÒ¤I¡_¿~àÀç~ò“Ÿ$.5Ößj[Ü(âÆ‹ˆFçí3Ößþö·“üîw¿k™›ötâfÙe— §vZ‚ýå—_Þr Á¤â*ÿ:‹¸Ùd“MÂá‡^&y­‰7<§Ž>úè°öÚ¼¯à†ôÆo$$ ¤ö°aÃÂr%2ˈ’ŸÿüçaÞ¼yvª¼å¹~ÜqÇ…ÕW_½|ÎvŠ7¿þõ¯ FòC‚ßrË-áwÞIî¬Þž¯GqDxñÅ“ãŽþ5²}Õ¥ëB@! „@qZFÜô8<ô04,²øR¡Ï€aIKç¾79Ìy÷µ0ï½·ÂÜioo½rÔ„@³ˆ›Ž\¥|cë!oº#qó¹Ï}.q'2Œ0yÇÚ§3ˆ›F´e饗?þñ“î ðsÌ1á­·ÞJމ²Ï> V{óÍ7Ã7¿ùMëvæ–ò(þ0£¬Èâ&®N¹ð«_ý*0N(Œ{íµWªòÚ¬†õtâ\ÿð‡?¬K¸ßìþjÞEËí âÆ×I{qm2R£q3|øð1b„Ì«¯¾š¸¢AÚ¤ „%€FHã>…¥¡—ÕV[-!m „žï¸HÙqâ†XYôÍÊÁzÇ»^B†;6¹Ž)TGÒÈöuT—® ! „€µ!Ðtâ¦×"‹…E–X.ô_~£ÐoÅMBß¡+T´tö[Ï…ÏÞ¦?W˜óö aþ׫¨ Í"nŠ6µVò¦ÄͨQ£’å ß~ûíÌf×ë*……õ¼üòËU]<ÉÁËü/ùËð™Ï|&ð½ÕÄM£ÚBÌû²LÌ‘o¼±ç“N:©¬Dp áÉ'Ÿ¬¸ü äêÀ×mä‘G ÿûßEˆãβ¸a|#þž~úéÄ¢ˆö¤‰W"}Œ›•W^9 0 <úè£Uçˆ/“/ã£G(˜(•Ä´`Þ, î»ï¾Ô¼Ä¸Àå …˜DÄ&ª&(¹]tQâþ‘WY¤¼jí_â#1'éçĉ3ƒ:!nŠöÍúM{ˆon=öXî@°`C?qcbžClåHâª_…10â3+ï!‡vØa‡ä2÷s²ÙÂØ0!+˜‡Y}ËšÿEÚ‡…ËJ+­”ÌÅÿþ÷¿™sÁÊüÛßþ–ìÒ&žAŒÙOúÓä\5âË%îiòñœáù’G¾öµ¯…­¶Ú*µ|Hg³|å•WÂøñãÃ÷¿ÿý¤2!nŒ %±œè›—%—\2àÚ˼AüóƧóûlŸ/WûB@! „@ãh*qiÓwɕ uvýVX°¢JVÓg , "í5ñŠ+ ³Í»n[ ¾X1Ä ƒÕšPزlP\=©b·Ûn»%Y¼«ˆ•቟֮û-+ç@ÎÄäip‹ÃÝ!˜62^|Ùµ£#|Á/v¯ëˆ¸ñõûvÙ~Vß  ²ª J1ʱÆÐ`ûó¶²Íü—K_k­µÂñÇ_AXY¶äƒÜãžIPˆ îÿX¼ Ï-âáä–+k®¹ff¶Ë.»l!+?ÿ=qC[ïÑ%««*ÒZŒæD¨'H“„ïÿcÎóü„ìãùÁciÆFltDÜ@þ`½™—ùÉsÝ\5ù-Á2’•¡HO u¤ÓN,ÏhŠi;Âó‹gþþók^“Ë$/qƒÕ×_þò—$O5Ë6o*m<’Ü¿FµÏ©]! 
„€B Á4¸éÝoH°òaÈG?x‘ÊÓöwî83¼÷ô-aÞŒ+OåÉ£4ÅhqC­ÄµÁò¦«Ê)BÞÔBÜP‚Byå•W†ÇXްEPh™ã¦X''Kÿ,¾ yÍÂÇ®±ÍÛÚnd#VrÜc(ÀX1Ð#^!±Ò0©FÜÔÚ7,eÎ?ÿü2iI‹‹ Ê3ËÛãúb„„£¸¾‘ÈòòØi§Ê±¬Þ}÷Ýd(ë.\fÅB¿ÉñÀcdeb„uW,Ô…Uy ‘cÁ:Ь¼°ð³%Ôãtñq<~”Ïb‚ÇÖ[oU"ÈXš7¶òðó߈°ãùd$&D÷—-˾fý…õ õa©ÄÙ²4gû—°6‰É;oÛŽˆ,T°TA Í"‰y  í´ñgþó»‚@\2?âg@œ¿‹¬ôXA á¹Ã²ßiB,žãÄ3¼¨ÔÒ¾¢u(½B@!æ7¥š¾ÃF‡Á}1ô[výü­)¥œñâ}aÊ]¿/YÝL ¥·¢By•8$nÌ]Šš[EÞÔJÜàj€bè•N”f,hxçë­WdbâW+bN i„„¼Ð£¸<ñÄIZûçã¾´‚¸¡^¾f£¥¹,zê© €›”),ÖV¿µ~åuÛbùo Ήòe¦oqk ÌÍÆ×[m¿3ˆo”…k.\à¼xeç·¿ým¸îºë’Ë^qEa§¿>n Ä .ÌC,ZPnM Ø>ýéO'‡(§ä5å–“ ÌU tŠõ’¹Óx…E…”¹ËŸÿüçDA¦\¬gâe}\ ß/+ÇÜ7¸‡ÌÕÄ®±ÍÓH/#ülN0oM†š(£¦ÈS&–2H5â¦Ö¾¡È㲄à yñÅ'ûöñÇ: ñn5Œc†`ƒ†H#a¸OÌ‚‰g†=_p4—ËëcØd)ãXã`µƒ@ 1O½`b±§Ò\Ú|Z¿ïÇóô)vodò]wÝ5É[üøùoÄ Vi)V<<‡½@$Bº!ÌY#-M<ê%n˜·ùžPƒ4½ð “û2ž6so@æø%½=qþŒyæïUk;[ÿ¬Èkq³Ë.»\-æ#D\–˜õ+ “ŠJ-í+Z‡Ò ! „€ùhqSªÑ‘«‡%?6>`ySD°´yóº…Y“>0).’_i;F ‘Ä µ5ƒ¼©FÕJÜð…ž¸±@欳Î:a^IÉ9ðýø¤ñÄ ä_;âØ×ÕäÄûÿˆµÀ2±MûÂÉ—t¾ˆ#õ7(|åΔYSpó0W”'³ ðnRà$Èe–%n°.Ú{sâpÍð1T|FdÑV,NŠHg7^!'®.±`ípØa‡%Š_ı@¼âš¦Ì“k ,[ÀÃ[Kx7$”KÈ‘Xp4Å÷†n(»§y…ë#ž|~H#sĽ‹X9± $s? ±µ çLILS°¹ž§Ÿüä'ÃþûïOò„ÀÁÚ$?ÜÏ´É"nÑ·¸ vì­PæÍ-Æ÷«(\Ÿòs k¬¬û¡#‚ â,¢˜Xä<ÖKÌ!,nòŠ?,_Œ˜ógYüøùq nÏS/ÏÈBîˆ0#é|?îõ7æFæ]ñü Nž¥ ž$óÄ·¸Êò×BŒ˜5ùãöp΋–ÄF$úëí×Ò¾ŽÊÔu! „€B všFÜÔ÷ÄÜojï–rVC ÑÄ u5м1—©Áëí•þÒ¤V↯ç|5Å“ÞeȈŸ>+f .,!‹ë k%â” œÕCܘe†o—ßGy2…ë[a…Þ\8PĶÛn»„$À‚cÖ¬Y¾ˆŠý¢Ä _ôÁ¡­·ÞzkEyv!•ŠdGé âb‚ñ®1yÚíWWÆç%x0"‚›•Ū! 
îgX¡d-󌥱/Øzw)¯pã–rçwú*“}Üå ÜPlîÄ Lˆ¿àC"ZÜ ¬ÌŽ=öØ8kq“Õ”y³D HSÔÍu‹ &L˜Pvñ <.FDÖÛ7ß\z,ŽË„[,/‡~¸le‹"÷œ XAœe­KŸúmûV†m±jÁÕ)&÷ì:V ¸c!I±å”¥+ºõó+2¬®Ò—GÜ÷ïnäç?„‘"¾ë®»ÊƒãòŒxð0qžo¤Cê%nxV1¦žøä¾0wÒØz÷5ˆÄ7Ü«÷ ·B\Ó¤bBÜH]Ú‹eV–™še—•ÏÎ×Ò>Ë«­B@!ÐxšFÜ,ú¡µÂðJA{/R¨ÕóçÍ o\=>ÌzuaˈB)q&Í n¨¬^ò&iC=µ76æö@^PÆq{@P¸L¹M#np=@9yóÍ7}‰;nIHµU’øŠÌ‹}3‰Úm®2´Çâòð%ßúIlÞòu—¥jR”¸ÁêÈ×4«Ë°À*¥½ˆtqC $0c¤Åq©Ö~¯¸š«HœÞÇÏ1âÆ2àdŠbœ—ã JqUˆùá ¯pc!Á‹+_K;Žcw`½õ»ßý.Iše=‘§f­‘VgÚ9o‘EÜÔÛ7,¨ ¢˜o&­-ž¸áº·ºñé!\X5 ‹3ÄuLÖ§¯¶Õ‹­ZdéXéÉb+¢âÇË0b ¥‰·ˆòVi~þû|¾ ç¾P£ð ¼¿òÊ+ɾ'nxù;vlrž¸¸$Äñ#,=ÁemÙÙ$ÃûÿPÂQþÙÖCÜðµÚâ|øòm‹ oAÃË>Ë #|[Å ‚ÕV“¢Ä e¡„ãöÐCb餉áE€^‹m’–.í\g7F,t¤¥µ×+®íFÜø¶1§ýÜñ}AÄ ¿z·zˆ+[~¯øgHFŒ‘ÇÇÿ±2lK;¸N\sëÊ"nêé÷;¤¦ ¦~Æ| ¯ØBì 1qÃ9{H¬ˆ,þçM° ²€á¾ý\Ïê?}·1‚@މæ–­‚6~e3«·–­?z¬‡ÒÄVyˆÊȲ±@ÊšFÜ,2hdX|íÝÂÀ5v.ÔÂi_Þ}äÒ0gêÂ+f*H‰3h&qC¥EÉ›"¤ å×JÜ Èðµ;¾\P¥ÌÜAHcÄD„|-ð'å˜ÒhåAÖŒ1"qÇBa$@§LÜm©ðzˆ_fÞ},"P°qジaùW”Ï´ÕRâ2k!nÌ¢ëÜ bÂ̯ÀŪS{."AÜxWž´%¯«µß±hùÒ,n¸Ö(W©,„€¬P˜y/Qmí«¶5· ï¦åÓ{Å?«ÞUŒ˜Yä…/×ö=ñá]¥êéí´ûâ²rÀÄViÄ¥cKŒ \n ‰mu0Λ+‹ÙiJ7iŠˆèË>Ï™Fˆ?OlÄeã"¹ÑF%§³\¥°âùiÉyîro±HeVÚâ:DHL”“Ö[ ÔKܘ›—'Dp3KÊxÙw#riV?æ.h–oô‘{:Kj%FŒÐªÖ_O8¥Ïj“?_kû|ÚB@! ‡@Óˆ›^}ú–¯‘XÝô8õÔSIìV~âË0fðF ðÅôꫯ&Y"1qÃIb2˜û/·>V–9¶Z éY© Rážà«1y[Mܘ`À^Pò^xá*u¿qc }ÃÉV4hP¢˜à§”Ü¥ÌÊ1Ê®0…ØB‘5aye³zЉ1Kö3ˆî↠ô—xA±EVx•ùsÍ5×$éÓ×ä‚û—eqƒõ J%‚r ~Ôo‚âJ]æŽãW›ñ waB9QÌKÊM[¼O<ñÄ„"€ññ¥^XZš1'–Ö±äi¸™ ŸYjÅVk,‡l+äp¿Z,’,â†vÔÚ7#RÀ„9é—&ÇÚò’ñÄ AG`1~üø…æˆ ì-œüÜaîãBäÅêÄbç½÷Þ+ß'>ÍgœXíŠ6{âÄÒ@,âÊ¡À³ÎÏ#K“¶õãÇuO8YzVÃU ‰WÆÊšÿæžD‚øbd²ì²Ë&óÁb ñlÁìñÇO– ÇšÊ[/ÕKÜxb‡çº¹ƒAþaýcÏ|k}¤=iñ®°’»üòË-iê¶1ÂsëLÈ!æ¹·>#V2yòä…b·AØ™ëoعZ¹q#«µ/N«c! „€B ù4¸±¦/2øCa±¥?®¹Kf¼âÚL{ìÊ0óåœ)¯ZVm›„@«ˆšŸEÞÔJÚPf-Ä „ ÎM“´X,iÄ y×Xc$V /Á”É*A( –¯´ÒJÁLùŠËŠN(6(­&nh±fLÑA ÍújNZ/ÕˆÌð±J@°àk°‰Ä90èSÂLj,b„€•_ÞÚ_iRVvg7ÔíPÈ”»›JËV¯²Ê*Ò‰-Â5‹Á±ÏWÔU |QÈmìPºqyƒûÈG>¶Ùf›òµKDÜáïÚ¦^¯pW#n¼5sšàÖŒ'VT,‰ÌXšæz™ÉœGÒ¬‚ò¶Bˆà®Èô9qCi9u,— PV±C P`mîT#njíÖ Ä AXI —>âê€ùn»í–¬,•\,ýóÄ $¹DÒ‡óKË|ƒ'Ï È}öéƒw‚88餓Ê-x&1Xx°RÊ88 ÅÜ—±˜M¬°[:s?ââÆß¯–&mëÇë´ý¾’ÅËÝ3/¶*­°d+‚qÝ/ÕÎqÖüÇj†ç¬=Cbw¤Ž‚6Ch@¨@jÕKÜÐN#ϸG[Ù ‚Žy„kÏxæ6Ì?ÜMùÀ,†˜'FØRf–T#FüjV±…}¥n»™'¸s÷)Ì/$¾W«•·³Zûâ´:B@! 
š@Ó‰ºÐgÀÐÐgP‰ÀµvXtĘ`®S¸FÍzýÉD¹Ÿ;õÕ0÷½·šßcÕ¸ÊH5± —÷<´Z²¯ÅäͬW SîÿS¨¶äwµBk!n P\Y•Lj+ê@á%œvö½X€Ý;¼jŽ]‡€ùÔ§>•òòN°âXP¶PäQ¼¨…Øb¿tDÜ 4.·Ür‰âl…ãò‹£øñb¤¹/e•W­-|eßzë­“¾olÁC}(¹¦dX¬ÊæÞEŠk(Ö_”PÚ™&~Ų!¦ dÙrË-“äEcë‰XF(ÙYÂ8Ó7[ŒtYŠ«/#Ëâ†4Xz¡t›‚ëóÙ>Ö)Xã@™x…»qCzP[ÞÚòÇ[‚ÝÆ±HóB ‰•EÎåm '+õ`!VM¼5éª7\¯¥o¸Ã@$™L9^x~Ú³Ä7X@… O‰”¨&—]vYyåKQ;gV¤ãyƒ¥ä‚,m ø¬ÕšŒØ!_•‹ãjâÇËoUçÃÒĖökÕæLXù@¿ägNâ¢Ï}ž«XñLÅÚ¬Ä ¤º¹ÅR>íæ~n†T#FüµØu‹¶@`BzW›'¸iÆV–•ëûéÓz7_ŸFûB@! „@ëh q³ ;½Bï~‹—¬nFWônö[üï–ÎU*͉tÐPLÙhqCã=yÃq­¤ yk%nl%«1cÆ$_(qÕÔi„ è¡pò¢F$ðeøÌ’… '|Gùìž(‘qbëVNë•‚?/["¢ø‚« .kYÂe’FKZ=õ7P(×|}÷ŠÄÊ#–ñj?>^Çç>÷¹ÄÕ%nŠÖHZ|\G¾UZ ½P/x¢¤ù!V޹MàXM°2 càË‘KJ.YîÌ[~ È>/Þ}££vP7q¸Ÿ<¾”‡; ÄuxÁ*%‰KWKß pw;»gç»ï¼c;Øi§¢×DºO¦á澑Š6ä}ûÛß^•'Ò!øà¡E=ðÒðâ¢í?ké…澆 \ûc£~x‚!|¤V­ý#ÎX tê–±hJiÃÄhÇ6ŒÎâ+á¡g1‡ÒýóÝæþ‹¸šexAÙð'®§¬ánYùj]ÇþÈñ¦ÃÚðÞÃ#’öÏp,†Ô¥F›?¦t>Sòøý±a>_‘r-}¥úY-E@D@D@ZG e   7Ž—8ÂZ„23õ«ÍîÑž7n–8¥ZQ…€m˜ý¦•ÃæªTM›E@D@D@D@D@D " 7ñtæÆZ…(xñfîð‰aú+“‚¥§„ÞQš6UxQÚnŠ’Úé˜=Š ¦Ì¸Ã“h3´Û„Ž^D@D@D@D@# 7ƒñ¬5Xçz„vɰ©õÖ[/NÞ`”]ZJ€˜6‡V0â–b×ÎD@D@D@D@D@š@@ÂM ¶"ênì8p&NœðhU IðH™´ÞÞÞ8›×ìÙ³c U 6ítvTZH¸©…V‡¤mT¸é : h{nÚþ5¿‚nšÏT%Š€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ@pÓTÛ¼L 7m~‚T=x€„›!Ø$Ü Á“®C”$Ü ÊÓÖX¥%Ü4ÆO¹E@D@D@D@D@D@D U$Ü´ŠtíGÂM UED@D@D@D@D@D@*pSN§n’pÓ©gVÇ%" " " " " "Ði$ÜtÚ-p<n @RhnÚà$´º nZM\ûúH¸©Û Î%áfPŸ>U^D@D@D@D@D@D`p3„N¶ª„#¡¥ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€´7 7í}~ú¥vnú« ¦pÓt¤í_ „›ö?Gª¡ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€@@ÂÍln†àI×!‹€ˆ€ˆ€ˆ€ˆ€ˆ€ Jnåik¬Ònã§Ü" " " " " " "Ð*nZEºö#á¦N†ª"" " " " " " H¸©§S7I¸éÔ3«ãè4n:íŒ8 7 )‰ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€´ 7mpZ] 7­&Þû›4iRXuÕUcen¾ùæÐÓÓS®Øæ›oº»»ÃSO=þ÷¿ÿ•×ëƒ4›íÿË.»lxå•Wµ×^—ÍÞÏP)o½õÖ ãÆ /¿ür¸ûî»ì°»ººâ=dñâÅ5ÕaÝu×maîܹaÚ´i}òæ¶2zôè°á†Æã¹ÿþûÜ9sú[;Yj©¥tM¶ó RÝD@D@†$ 7Cð´×"Ü ><¼ñoŒ”z{{Ãå—_jy0ßqÇÃØ±ccþÛn»-<ùä“Cx{òöÛo_nÎ>ûì°páÂrÅÞö¶·ÅÏÏ<óL¸âŠ+ÊëõapXf™eçqîÒK/-üí¶Ûn‰õå¥\óm´Q˜0aBXzé¥Ã°a bÇsúôéáÑGõÉkú¼ÓN;EÑÆg:ï¼ó¼yóüªŽû¼ÕV[…VX!,Z´(üóŸÿlêñí¿ÿþ‘€²Ï:묦–]­0îí›nºi<6ê€xC=)ažxâ‰jE„ƒ>8¶±_|1\tÑEåôƒ½­p M:5÷Tî­ílh«¯¾znø½G D̿馛â÷´þË-·\Øf›mâêûî»/<òÈ#i}h– 7+îÿýÐ3ÿÕ7OÝ£– Ïœ2ñž£@IDAT÷™&ŠŠ)J ᆇq:f÷ÜsO¸ãŽ;ìkÅåøñãÃî»ï^NƒpÃÃl`H¸îE÷ú†7¼!zÌŸ??\wÝuE³e¦£ÃHÇ‘õŒ3Î(§Áàõ¯ýë-^0µˆ7y6cÆŒpÍ5×ämÎ]¿öÚk<»Ìñ²¸òÊ+cgßÖwâr¿ýö cÆŒ‰ß¿ÿýïM=Än–_~ùØVö²ŒŽÿ-·Ü|ðÁ¬ÍqÂà>ûì??üðÃáÆoŒŸÛ±­ R 
RaüŽU¥öØc(hÁ¡Ùç7t¶ðÖ(b»îºkXi¥•ÊI%Ü”Q Èÿp.›9wzÀ„Q£FåŠ*3'wÙe—0qâÄz1Ø5™®';Þt¢ñšÀ¸ÖŸþùèmƒ ƒ·Žm{ì±ÇÂÕW_ÓýCŽÆP½Jú¢e–to~ó›^Px@~úéM­ö@7´…½öÚ+â`èØã™ÁñÑî8V3†Âå ¿DD´áDÿú׿bäkǶ‚7Šy—Ü{ï½áöÛo·CÌ\¾õ­o x®Àæ‚ .ÈLÓ+ñŠ]yå•cU8xÍš5+ ÌñûmÂÞ7çŸ~Ÿ*¯¸âŠa·Ýv‹ëvë­·öÙ®/" " " Í#Ðá¦kØð0léÒÀØñaøøµÂ°1+Ä#XüòsaѬ‡Ââ¹³Ââ—ž½‹5ïÈTR.F„ ½êª«ªyâõ ƒ*wôÈ'á gnŽ}‘=7S¸±Î<Ú¼÷ÎFŽòtýõ×÷©Ö–[nÖ\s͸næÌ™Q˜Á[À Ñ…6„xClš3Ï<Ó6ZšxA™xÑI*¶÷Þ{GoªÔªÇo纕C¥¶Þzë0eÊ”X}DÄoÛn»mXmµÕâªgŸ}6wx™xfÒ&´¬½µc[©E¸AÝwß}ãñ3|è†nðxÚæ3×ò!‡¯iÚæe—]Ö'Ýž{îYoÒaˆ;lÇwçw¶Í±©"" " "Ðiú]¸é¹LXj¹Éa캻‡ÑkêðwÑü0ïᇹ÷_^™=#ô,x±Ïv}i>F…›çž{.>àUªÙ&›lÖ_ý>Iª 7<ìò H<Ù³g/á–ݧ°Œ/xðÿé§Ÿ 7ñÆ[C¶±:µÆÔàWyÞ$ã­@ ‡fv<)—ò­lëÀøchôs+„8Ó¾èØó>µF82l8!´Þ÷£´¾ö±ƒ}cx¡¤íËÒ¥KxðÖšNuµ:7S¸9ôÐCcg q‘ó4Äœ4V B+C¤*‰ &@PÞ?þñð /ð±Ñ‘åú«T~VAð1bDšB^oKþsNê¹opnñà¾u^iwˆ œ¿ZïVW6ST\Ḹ2¼Šö†×Fž5*ܰ/î?ܹÿÀ°šÙЯJçÒÚ`¥4y^)µ¶•z;j¹>knl¨"ÿýïWVEhœÚ˜=Ví<°½ÒõQ-?Áêù]Àò¼ˆ¼ Kðë»îº«\,ÜM Â‰2d" " " ýC _…›îcÃð× ã¶xWQZV²…ÏÜ^¸é/aÑ3÷‡ž…ù©•Êжbnè0Ÿ{î¹±·Ç·¼å-±³å·ç 7Äâ`f`½Ñ)d8FÚ9ô‹=ôPìÀ¯±Æåü<ð@Œ­@YÌ”„+8îû<›Ñb¸£7Þ8®Îò$¢NsevŸDÿùÏbGÔÊ­¶ôCXÉ;ùþØ„8.âC¤f¦JCÖL@à8çœsÊE4[¸±·åÔ— ¢ÄF¡“‹±Î i„#ì îÊ0"3øc‚ŽûÔ•ß:ŽH ÃÈ2 ÊLYt°R£cC\¿oÒÀ–ó“7d­µÖ ˆ—i¬„Ú4"™gµï~Y©ÓëÓáÝ`í!7Œý™ØÁ1X;C€0á‘v;¼$ØNñ¼aL6ÔŠ²‹ 7vÈ“çÑâøkƒ¸7b™]wx cÔ“ö̶̽[Ùˆ´ ÏØ¶™P€@Áùã˜i;f𢠖pO!î?‡°$ÎW­³71œ„k=ëºõ×Á…Žƒw“?.òQßTl£Þ•„®Ç7½éMåc Óíëc‚ÒZ» <Î ÷P:ᔥñKù¸·r>ò‚™Û}ˆü§v‹>ƽ—û†¸ˆÈX´­XAµþvX>†gñb!=n~ðŽÁãÌŒ6£<Ëò4á^HpoXzO"ßþø½aH‚½7Ú&1¤R­–ë×Wé3ÇÆ½ #h6í>5/X¥C$¹vø­Ç†ÚðÇ”“¾‹€ˆ€ˆ@èWáføŠë„e·<"Œ\y“BDZàñÛÜÿPo(”^‰ê#P¯pƒPÁÛ= a™&²ŒŽ63‚`Œ—§Ã‚e 7~æ˜(ùÃ4ñü[HÞ®ÓÁè¤!äxóÂuŠüvÿ™ÎØ"Ìã?î7G7pÞ|çº~Öž¼t¶Þ¯çiÛý2+t¥Nšåµá0i'±Ù .ò°¡s‚áÅT¸±´VÇt™Ç/biøN¬Ï‹à@ç¡1W~3eàŒe–±‚Ž”7Ú:±bòöÍqÿ«$Q¾7?ŒÄ¯·Ï䣓Ã5„1TNp–n|L¬r*­cØ’‰;•ÒQG:i°.Z/Ê+Ú÷×FÖuí…ÚDÚáõu§ýL5}M$£½"ÈdqçØÀN§6ϲîeyiYo3$! 
!z{³kƒvA}ióY–×Þòî ©h“Æ ñB@Öþüý)r—•>]g¬ó¼ŒN71»÷m+äñí…ï©eývÆx§éí;œÉL¼Ül³ÍÂ:ë¬c›—XfݧÍ{-“ ÷*ÚŸ ¬i¡x,OÆÿîùã­v}¤å5òÝ{Ϧ^5Ü9gX–_#ûU^¾úM¸!ŽÍ˜u÷ˆÞ6}wYùÛ 7ý9¼|ÿeø7²þ!P¯pÃL¼…¦£\)Æ…=óŒwJ,íìø7yt´xëÊŒ53¼d‰ñð‹xCy˜nâŠÒ¼*è|ó@Œ@È»xS>eÓ¡Cƒ‡^8Þ¬ó`ë|œ éhã™?f=±7õY1¬Œté¾ÙÆ1Qoއà)¥¸v~Øþßÿþ·ÏÛß¼NiÍZ-ÜØ~aÄY2Ì; ^Žtlx#otVˆAùœ?Úm£Íní<çs„qŽ'1ÚÞFÔ £>tÚ0¶Ñ$m–6c1d¼¨†AŽÎ=ùÉgB×Yúæ=žüáº4Q•ú™Yùžµƒë²šÁÃ{TeuVóÊ@Ü3Ä=„/ŒÏö–?½6à€ÊõÁuB{"ýÎ;ï\¾F蜳žkïŽÑØ]xá…}†?YÇ9î¼ô>x“0»3Ù5mÛñ¼ Oý:LÄÍ`,OÖ1êÆ}éâ‹/î“Äî—¶’cäx8.Ž…skbÛ¸²³¬{œ‰c×Gê2yòäèIhepÿ„í’ë O3/jQ:æEÍ‹Bˆ÷Y^9æ…Ĺ5¯”¢m¥Þß‚ðâ‰Á1‹k¾”i"¾ÿ}£Mpý’wƒ 6ˆyù-!/Æ9…›ìñ6ÂÒ߆´ýqáN;§|Î5sž7~zô¢×GÌܤ?^¬¥H®§—^z©Oé#O5®U™ˆ€ˆ€ˆ@ÿèᆱãK›ÛF¾nÚj¾àɻœkN g=’û¦»¦•x & yÈBä c€!Üð°ËÐ,k¶:yhE„`¼>B:|X*ÜØC,íÑ„ho ’ux÷˜‡B*Üø8>¿½½¥ü¬‡J†ÀЙ6ó 1Ø?ÆC<%:¹f<œ3DÇ:öj%]5K¾ æH§Ê›Ùv³:i>/ŸB¸áaž)íÃ[#ý›nDÞ@s.Í8GÖÁl¦pc1IØWê©À:ßvh·6ÌŠŽ YJ;Ëä£o3¸Ð)ó>ZC?…7ùj1;ÿé𾘮Ï*a̳7tüé`aœwΖñVqÀ‹>à*yÏéøòvŸ´yÖ á†xR AÁhÖn¹æb„‰iÃukžu0æ¸ð¾3+Ú‘·ô,+uÆÙî¯LØûøÐµÔ«ãµ‹Vï+ ³>ywÑ,JW#F…âÐÙÀ¼pâlmÚÐ<áÆÇ=)R}:$6|Äw<òßÁóÂGº/ÿ†Õ 7æñ’¦Ïûž\7Mï¾½|šÎ{(•˜"˜uÒè·“p㽂ü±ÔËÑw^¼hç˶ÏÖJ;J&ÊÔ*Üø7Þ¶jK߉÷^F>&†Àpmd SÌë˜û2²>ŸÃ¦*'ˆªÅaav3êdqRl=×Tž`jùüQñdžMeu¸}úôs¥Î8iýµ·K©YûÏV,½—ªƒPƒY[I=q,ß@ 7ˆL[–áAÈýcˆ Þ2˜1‰_ÜÎ3©?l‹-¶ˆ«ó[4‹ÒÕH Âwuç—˜övÖkÍnünú€\ípŠ7<€â6Žy¯ ´lë °Þ 7öF˜õ~È ß1ß9-:TŠÎ—ͼcBˆ›ÖÇÞº¶*Æ Ä<›F8Zç®Ò±ú7Üy7 Å#àkj>V†ÖA‡†˜_Ÿæ/úXMÌ&…7 ŸÍÒa†õ 7‚5O ÔÌ>茚e­Ç“ÉbTqÌ6‚L^ÇŽò,è· ‡´}T[ÚµFÙæ=çóøk#O¸ñ3JU*eÂ2 ²†Jµ«ÇM¥¡Rví§ÇdëaÉ=Œ¶†À†!ZÀ)cç0oÈyý´ö•„ï݈Cœt”gæ¯Ù¼5•ÚJ#¿vo¤.ˆ§x¿ùºV*Û{”f ¯”éÛgÖDn*Ý3½híãȹ>¨C½† ·ûî»—ƒ½çÅGª·|åhŒ@¿7<”tu #'®–ÝöÃaxi†©"¶hÖÃaε'…OÝz{—ß|É«4Å 4C¸áá’x6 › DgÏYÒäËnülUY.啎¦ˆpCý,H+í‘xo~˜ë½pンÖ:$Äï#ýì¾ýCyšÎ:žé¾ åH×[~·nᦎþ-~^'Ýw´RáÆ:§ytïQ 4«)-Ó8×»ôB±süŒb&ܤó¢ûBœ±Ä~yÚF,/ˆt=ÃgjeS©3NÝüµ‘'Üø€àyKŽ—:<Òsoçt=ûÇìúÊ;ýã¦Hpâ´î&ܘÀɽX3 u²î3–‡íi;d÷m<|( Ën¼¸ƒ‡h“ç¡ *ýñm-/ð{¥¶Rïo‡Ï—2´ºy*}¡à…›¼ódbsž'‘µ?ö‡·¿›©y¯¶¼àÄy×GZVÑï´` ^Œƽáuä§~öpŸÎöSÖgušüliÀÈrÅ'vÁ;±vnáè§ÜF$¨u:pëœÒ¾Z’ÎdCX¥ÂŸ~9kºd:8tòñÚ¡£Jù]Ž™Ž?b¡>Åv?Åy<ÖêK:3‡ïEŒë:+1íœvƒåÅPAä@¬bÉñ0Ô%mïä÷×T­³fïFÀ1õdz.¦Ò2ý÷jqmTê˜úá(+׌ ÐL›°àÌY‚«uœÛU¸y„þ#"¤°´m$´u³,á†m´SΩµá4^ йåmÝö› 7i¬®ËJ¢ ÷Nk+6ë×$r³¬Z[©÷·Ã•¢msQDÊ´ßê”þ–q¿ ^íŸ6‡0Èõi‚—Í”˜²Ž™J¬ýÙwÎ#l¨ËÊ+¯3D7ŒißPÙVôúˆ‰ 
þáþ‰w–í“kæÁÌÍHŠ·‘LD@D@D õúU¸±Ãé¹tÉãf¥0|ùRGkŵð1ãã¦Å/Ï ‹Jb͢秇Å/=z¼dY´ìGÍn¨"ñ5LøÈz“^I¸!¿÷ªà{–¥o˜knxs‰„uæ¬|ꊇ TÞc©pC‡‡ãc•,}À¯”Ö?|3Ã"BžeÅQ N~ö–4/ÇÃŽ;í¨{o³Ï>»OGËf`"È2ž"EÌ<èX䟡œF8Òqd?ÖùLëEGƒm¼%O…›Ôý?ÍKz¼¯°T¸aÂÌòöMÊ #moÍS±‰4YÆ,aˆ:Þü[ŸuMÙ¶tiÂblÞt0³býX90$Ðqz­Øv[R'†\! ÕbÕ:ãþÚ¨$ܰO;¦¼ýs „ÕÄKgçvn¸Žè¬[¼"«³-ÙΔõél[y ù¼ÀBþ4X1BÔ’ØåÛ8é¸çr_cø– 7Æ1n,ðdžÞÐÆÈ‹yÁ3-¢Z[!}=¿ G<Í3â>ÿ¬ûº‰N>¿]Ë;î¸c_Ø–͸!òs¯0ï_Ÿ¹¯áaèŰZ®´¼¼ïxŒ2ü°¨¥¢pÑ|J'" " "Ð8–7VM¦=fB×5&®Zjqémó—Ãc Ÿ°$Z¶€@- nÔöðž%$ø8!¼ùƵÛÂð¼˜x ÐiMbyH$xä}÷Ýç‹ì3Mm¥¿>o‘è¨ãuAç–ŠÏŠÿAx 'رïÜP6?¼8èôµôá›ré\ù7½t:y KÙYF`Š`æëDg€x< aã¼¥ÂÚ–v,L¸©eø‹ p¬$Üp pD@ÁkÄÞ <ðH¢t‚Rá†t?xh׳¢ ÔÑéÅò:‘ĵ }¦ûæÑÞo¹å–²h *ýá\"øpùó 'ÞÌsäµÚÃXÌ›§áÆâk¤ñ7ÌË ëúµ:û%]tæaêc¦c 7Ú~­fq:¥ ‘IÍ_‹•‚ô’ö´Ã;D3^Ù†øÇ"djÖqΛbÞ„è<îÞK% .î«èw/€r]n³Í6QðÇŽû'÷…Ô* 7¤õC“Ò{ÛaɽÁv|œ#ïaB[÷÷vãHþ"vÛm·Åû¸ã½(Ó2ªµK_ëoù¸¾˜uã6£msÝp_€?–U?î¹´~×ìü˜pc1ª²Û~Œ×¿7 ½²–†mW_}u¬‹­cYËõáóUúì*¥³mnŒ„–" " "Ðz-n8¼Õ—öšÿºx¤ãºF‡ù½ É ŠO)ÛzD·ÇZ„›V=¦2¥ƒ‹7 ‰¢×é[j+×§IÅ KÃ’8Øtf(|:wµšPö^xõðWtʆAcx¢y¨W»[#a#ØXç’ã5a"K¸1ì—Î)ËzÚB˜y¢ ±¯"fùlj9¯œS:–téD„±„EŽïšzÄšþ®·oO°âܰLæ…@¹o!ÐðBhj¦q^±¹–î“u¿ñ3æ ý©µNˆm6Ä4õú«µ,Ÿ¾žßÚ5Œ¹çÒn²ø}øÏ\ˆ«œ'òóQ«$~{ááÃ;’{e –ûx¬¸þˆ€ˆ€ˆ€´”@Ë…›‘“6 |a—>yØÜŸõù®/ýK ]…›fµy P.žéØ|ÿö·R§¿™õÊnš¹¡VVáf¨1ÑñYÂM×Þ ÖYÃH©‚‚RõÕéq˜wP%¯”4Ï`øî=O³†XÙ1d 7¶MK¨D`@„›/¾¼iXÿ•Wƒ-R¹ãç¦õ̬TOmk"¡"Üø©ƒÁGg·×t@è˜X|¶µj& 7Ðn®I¸i.O•ÖZ!Üà¡Bàv3¼º&…€mžm¶­Zl$KWm‰ÄÌex–TòJ©VN;n¯?Ì×W§¡Ï" " " µh áfÚâÇÂñóÏ®¥ÞJÛ¡"ÜÐA SÛæo“ñÆ©4“F^ÞzÖK¸©‡Zå<n*óÑÖö&0 Dˆ+F<#†›åY:z^º"ëýtÚyñΊ”ÓŽiÌ“¨Z  7íxöT'D¸Yñrá‹s7íCèøù%¯›Åòºé¥Ÿ¾ áÆð†©^ñ²a sÄbFð&yz)è&S‚·Ê˜êÛf5!p;Æ i‹fí‡`¢ÄF">Ãd"0˜p?à¾@§ŸY£ZiÄÐaf!‹ß…ˆÃ=‰ké±Ç‹A»›UbÛØ F¸&îS§±{ˆ™ƒ'ÑwÞ™{XxæàéÉo›e" " " "P”@Ë…›¥7< ,šõpH‡K±è†pÆÂ¾3=¥«ÀPnj££Ô" " " " " " "Ð>Z.Ü,¿ó'ÃË÷_äu3p@ÂÍÀ±×žE@D@D@D@D@D@D -nVzËOÜë~ë˜Î.ÅP)†LÉú—€„›þå«ÒE@D@D@D@D@D@D YZ.ܬò ¢pÃp©·,˜,ý÷¦!SžFÿ|–pÓ?\Uªˆ€ˆ€ˆ€ˆ€ˆ€ˆ€4›À€7ˆ6æuóùç×S‡­Úç¸$ÞôÁÑô/nšŽTŠ€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ@¿á†#yùÆX7 ž¸3œ:ö£Kæ¦õÌTÀâ%È4¾BÂMã U‚ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€´‚À€ 7^7/Ýun˜Ú½J8nôA™Ç+ï›L, ­”pÓ>e–Pᆣœù›}Ë{ܨƒJæV)÷äãi4öYÂMcü”[D@D@D@D@D@D@ZE ­„úà[‡ƒ‡oUñø%âTÄSu£„›ªˆ”@D@D@D@D@D@D@Ú‚@Û 7PA¼aøTž÷''ÇÓ(öYÂM1NJ%" " " " " " M -…ƒ‚€ƒUóÀ±ôÄÃÁÎXx½­Ò2ƒ€„› (Z%" " " " " " mH 
­…ÏK^8žFcŸ%Ü4ÆO¹E@D@D@D@D@D@D Upãq4#•§öÿ?K¸ùÿ,ôID@D@D@D@D@D@Ú™À n<Ðj"1pŽŸ–Ï2ä?7*ÜLž<9Lœ81,·ÜraÔ¨Q¡««kÈ3(B ··7ÌŸ??Ìž=;<õÔSaÆŒE²)ˆ€ˆ€ˆ€ˆ€ˆÀ&0è…îqòâá Þ âÈB¨W¸A°Yo½õÂèÑ£…QD  æÍ›î»ï> 8M`©"D@D@D@D@D S t”pÃIªäsØÜŸuêy¬é¸ên6ÜpðÆkÄýŒ~|Q˜òøØ0yÂâ1aXè®iÿ&þа?6Z„ò‹À€ÀSmÙe— S¦L)×á‘G wÝuWù»>ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€è8áÆ,ËûFæ^¥S«pãE› n6{aeÃ< K 7‚];í«¯¾zYÀ‘xÓ€U¤ˆ€ˆ€ˆ€ˆ€tŽn87Yâ×6TŠáQ›nºilêÛ^;.¬9o…oönü¨M$àÅ›Ûn»MæšÈVE‰€ˆ€ˆ€ˆ€ˆ@'èhá†tܨƒÂÔa«ô9WC}ÈT-7»ï¾{ŒiÓž6v%Ü -;…€‰7ļ¹üòË;å°t" " " " " M Ðñ ¢ â·¡¨¸¨pcÞ6æŒ {Ý<É#ÐÏn¿vÞOðl#ö¼nú °ŠAJ ã…ÎK:dj¨Çº)*Ül±ÅaÒ¤I¡Voø¦^Nͼ>$Ü4“¦ÊjæuóÄO„›nº©]ª¥zˆ€ˆ€ˆ€ˆ€ˆ€ 0!!ÜÈë¦o++*ÜØ0©=¯šV\¼tßB2¾¥ñƒRÁ,#K]«$ÜÔ…M™Úœ³Mm²É&AÃ¥ÚüD©z" " " " "ÐbCB¸iëf({ÝnöÝwßÐÕÕ»bµªS~›hƒHvðð­Ã´ž™áŒ…×GÏ›t¨Z£m\ÂM£•¿] ì¼óΡ··7\pÁíZEÕKD@D@D@D@D Å†Œp“zÝH¸ áÉ'Ÿ¬ØÜöÛo¿¸ýWL©˜Ž&Üœ:ö£å´Æ8ÍÊ êü á¦Np¥l#FŒ‹/Žÿë/E9û‹Â vþùç7};í´SØh£Â‚ Âo~󛦗?Ô <ì°ÃÂøñãÃôéÓÃ…^XŽáLJ}èC1ÏÕW_ãÕT@‰Ùû%¦û4{ÃÞ?üðððÇŸÿü綺)ËF5¥*¤­ pÀa·]w —^vYÍ×P[˜*'" " M 0d„XyQïCuv©¢7µ7|Ƽw‰9Y ¢Î‹ðÈY5æ;xøVqYäO«„›“N:) W]uUøãÿX¤jMI³âŠ+†­·Þ:L:5Ì;7v¬ˆyB‡»šýú׿Æ ‹ÿ³Ï>;&ïîîŸýìgÃZk­;j¬üö·¿î¿ÿþjÅ èö¬cÐ e켑s•Q\èOáæ;ßùNXgu¢GÏÁœµ{­k€Àßþö·0räÈðì³Ï†#<²¦’&L˜N>ùä˜çßÿþwøÉO~RSþzŸyæ™1Û=÷ÜŽ=öØrýë_èQ£âwî—•:ÑͲF5«*§= ¬´ÒJá—¿üe¬^‡ï{ßû /¼Ðž•U­D@D@D` )áæ˜Q{†Qaxx¡w^D}á¢ÛÂÌžçûÀî²?„›,ïnRÁÌÖCO(ò¦Q•µJ¸Á+äºë®+wª*Õ«Ñmìë«_ýjXuÕWÅ,_²×^{­_Ýç3ò/}éKqÝ÷¾÷½@‡ ûò—¿Ö\sÍø™?”õ­o}+<øàƒåu­ø€·Ïúë¯wuï½÷†… æî6ïXr3´xC£ç*¯ºnòÈ´ÿúFD‰vn~ñ‹_„þóŸMƒß£¦UBå@,!HúóÏ?~üã禫gåR>vÎ9ç„[o½µO1núàÐX‚ÀnÖè^1ŒîQ†0«÷¥0§÷å0¿wQyÝPøÐ ÜlšucX— O'¿Þ„œ,ÏÊH­…D ĆZ`¯¼òJ|s¿Ì2Ë„±cÇ–üå/ÉíH½ë]ï »í¶[f왌¿ûÝïb¬"Þ^òÝr¡-úÀŒ}ìcqo?ýéO—xp÷ÕÈ;Ÿf >7ã\åÕ]ÂM™ö_߈(ÑN ÓÒÛP)„›fZ#ŒšY••M€ßŠe—]6þþ¼ímoËNTçZîmvÿ?÷Üsßþô§%Jb¨éðòúÇ?þ±Äv­¡L`H7c»F† ]˄պ'„5»W +t½Ú~®wnx¸çé0£wVx¦ç…0··úP”Nh,ý%ÜÀÆ?óâŒq3Ïœt[ÖP+Ë“.;Q¸ÙgŸ}‚ _¹óÎ;ÃøÃòao·Ýváø@Y|ùøÇ?^Þæ? 
ü0}û3Ï<>÷¹ÏÅM+¯¼røæ7¿?ó–“æ²Z„›¬c¨z§ûmƹJË´ïnŒÄà[6"J´“pÓŸäaÔŸõRÙ¯háFçAD@D@D Ÿ@G 7̈´\Öêžö¾aØdØê™$n_üh¸lÑ]ᡞ§ÂìðrôXÈLØ!+ÛA¸I½kR/œJ¨›!Ü p0íòìÙ³swÕèP)b˰ŸÇ¼j›:î¸ãÂk¬æÏŸ>üá/Q'„˜ 6Ø ®?æ˜cbì›4‘Õ÷¿ÿýoøío7û!G?øÁÂ]wÝ•fëó}ùå—ÃÈÓQÔì8Ÿzê©ø¦6/_-ÂMÖ±d•;zôè@§Æ]®ÅV[mµC¡RÈ*¯ç*«\Ö5C¸3fLX{íµÃRK-½«hçXÑ7Äi™2eJäJóÿýïaÑ¢b^‰´†úñÿ¡‡ ´‰¢†ÈÈ9™6mZKb[0ý:C÷f̘fΜY´ša…Vˆ|_|ñÅpß}÷…žžž˜·¨(Ž®=öXÌÛ,áfܸqñ>ÂyàZ¯tÞòbÜQ!a£Œiƒ–—{ÙÝwß8O­2ø›ŒvÁp£Ôð¨äžüÀ„Y³f¥›3¿oˆßb»èqS þ³½/†S^îZüX>U)í`ß6 ìlH^7S»W‰AŠñÄIÅœ<ε7Ìœ²Í6ÛD¡ãÿþïÿ¢ZD=ŒÎ>3ªüþ÷¿_bw&¤1nÈK0O¦±¿ÿýïáâ‹/.ç'¨ó;ì\kûáaûg?ûYøèG?ÅâËàUbö£ý(0, o›¬À¤x`Àóñk,?±_ùÊWâWÊâaÿ¨£ŽŠß­þ¡ú裎Û$`èõzýë__®/}ôÑ@ç./ˆñ[Þò–°çž¥¸Q¯2¥,òq ~6: GÆÒºÀÿƒü`ÜfÒc¹ãŽ;â¦ß–bu•:¤§žzjì|zè¡}öØDpKfÃñ¶å–[–Å0ÎÇŽ F]ž~úéðùÏÞ'¯ú¹ÑsUi7Ĉøâ¿E¿Žñûßÿ~äL§‘v`Þ]>™OúÓ1Æ…_OzÚ­ó:ÁÄüA\Ü|óÍËç˜28¿ˆ7ßøÆ72Eœ¥—^:¶[â_P†ùˆçDœ ßnÙNº]vÙ%Üxã5w¸è¼r¾7Þxã>ûcxªq Á+Ë86Úhoæ!Ç0D:²yÁ‰i¯tP9@8epœ—^zi8ï¼óÊYë NÌ=¯<Ä oó^~9\vùå™ÁÕó„âl1ûX^;ñå§ŸeÔHäw6H[ò†PAçÏÃT´8ãŒ3b{årÜYfœn¹å–²÷"éL¨ãÞŽPþÉO~2ОÍ„¹Ç#`2„õýïŸûu¡ìÓO?ݲôYæ± ßE]”9ÔÈÄYÚÕ{Þóž(ÖRŽÝwÙíûk_ûZŸY%O;í´(ôö©Àk_سyã;¿I\Of´‚ésï¿á†luŒkƒ—bžqN9唸™áS ~î¹ç–ø] Þ§pDtõÆÐb^Xð›C¾¸ö0D÷7¿ùÍa‹-¶ès¼O ßõõNËÑwh)Üð1¬«;¬3lRx߈Ãê¥!REìÑžgÃï^XüDXÜÛÓça§HþÁ’f …8™xcÌŠŠ6¤¯E¸ùÄ'>6Ùd“è ‚¨àãÅØ¾YÒYJÅ›<áÆ<.È÷¯ý«O§ˆqÞC*—VÞ´4¸¨}ä# bTÚáátï½÷Ž-l·ß~ûø€›W>å™'† KÊJÛSê|¹ô°›¾åõ ²ò‘¡ £“L§&ÏÞûÞ÷öÙ” ×3fçvYœIcD"ïEƒˆlΜ9}:Þõ7±  ª« Yëö¸¡õ«_ý*ð&:ËèÈñ†q'«CNGˆx&y†Þ`iÛ#= ™YùÙ?‚#³£™qYWiŸY8›õˆã@ˆÅ+¨ˆÑ©†Q¥ýql´[:ÛÞðaø¢ïûí´7î-œ‡,á†{‚;åóòo…õÖ[/®®U¸áúâzÌ«›•hiÞA¬3A"U ‘ͼûÞúÖ·Æ:ùÓ(£FÚ íÑÖ‹Èiñª@´óœ1À3†Yfin¿ýöpüñÇ—“ s¾)—sï…GKÄýèÏþsHïq¶%3"ÜyCÄG¬t<ÌHÌ3ovrm Dr½gõBØÁÃãåCVýÙFZïÆöÁ¶,cßY£ažß¦<óÇÁñàEÉõd÷lˇÇ&×f¥vŽðòÝï~ײÄå;ßùÎ(˜òå‘G©xŸB¼CÄ“‰€ˆ€ˆ@;èHáÐã»–{ß80|󚸟»èæpé¢;‹;ÕÚA¸±€Ä0®eF)Ò×#Ü£“wÁÄá;í´SxÓ›ÞTîÈñ°É[;³,áÆÏÐÄ›r{SHž­¶ÚªìÙÁÃ.®tÂè â™b!ÒÖ"ÜÐ)ùQ©ÓØ]ò4@x@ŒJíë_ÿzÜ]¼&x¸åá :·ØÉ¥ŽÀ´’«<Öæ9aÂÛñ:ÿüóã?löÚk¯Xb‹ =‡ÅС3Ï›gÞh3…9o3&p ‘³ÕÍ:¯¸äÓ¢#ùÒK}¯µôXba¥?&Üðc`¶›+®¸"é¡®x a<ôÃÀ:©^¸a;kòâE±Õ2L†ü•¬È¹ª”¿^{ÓNÙtƘÂO² 7Ü0¾íg(Žì¼Ç B^ó$¡c{å•WÆ mï„–Špxùðƒ'mžëˆó{†@aé›{ŸáâÄOŒoÚßýîw—½[.¹ä’XC›,f+ò‚œÆÄÉbCÑ®1Úe†eQw¸ã!‡Ñ.¨›Iftc‰Á‡v‡àA]ÉëÅ T¸¡Îˆ!Öá$ç‡ky×]w‚¬m£üZ„Ä:ÒÖé¦îˆÉ\—”½Ùf›•= 
ðì@l03A¢ÂM£Œiƒˆ'ˆæ…Ú/CÅhÛn»m¹-!ÈÙ½ Æ áÆxrN ª‹7ßG±ÄKDÚ1Žx€˜ÐI;°û"e1ÌëÑ®YîOüÖ0¤öÆõhç/ök–Š*þ÷Ž{Aßá…]sÍ5á³uá[FF—†!´šÅ}ïKŒkÅ® ~ãø½¸ù曣—×–ÍÈ}àïxGÌÃï^?äÅKcß^xaüÌo¡ýä 7ÔAÆŽ›óõŸÿü§ü[††b”Ëï‹™nXÇý‘z_ýõaâĉQT3ÖYB±•£¥ˆ€ˆ€ 4Žn¦”fúÐÈÝb0âZ ¬øW ®Ó{ž©%Û JÛÂ÷¸IWƒY¯pÛQ:Ö¡g? ˱·Õtv>õ©O•wŸ 7¼íc˜ƃßYgUNËèóIgÔoŠ 7tèè”Û›Ó¬aR샎%®<ŒÒ‰1«ㆷÄt–±éÓ§G÷yËÇr=ö(?|ãJoÃÁèí¸ãŽñáž}áoFG7|Œ¨[ŠÆ¸É;_Öþð‡>öÇTè/†Ë Œ¯¾…) zax%|kÞ¹áÞžÚÝÇ@§háÆ{ÛÀ¢UÂM^p^Ć0,èý¥‡z3/ÜXPI¶ñ°×Nj<´ò6•+Æxo©ø7舀ˆ€ 0Žnj~ãσ÷ñë;åó@7ÆÎXt}°Ï|¯ç<ÕëqÛX:/©L˜ ¡o„Ì„›øåµ?yâ‰÷^ñ¢ÏËg∠nãè€eyÄÄ ¥?Ôcà!žcÄóÁ¬’pCZ‚=bÖi°|¶´¶’z#±‘…`¤túJƒ×ÿmXM=ÂM¥c1áæ¶Ûn+{yX=miB“ϼpãϯåñC¿l]º¬£‚´EÏUZnú½ᆸ “Åò„¶Y'?íhZG.õ4!¢ CmXz/ÄDìÈ#ŒÃÐ,Oµ¥ï R'†±0üqÇ<"ª•Qïvf£¡³Ïðþãù€øÂñaˆatl1 ˜Z‰te½pÃ~ˆq‚q=|á _ˆŸÓ?ˆ+6´¤¨pÃ}ÄÄÉJe§û²ï&Z4C¸i„õi¤ ÉkÇœ.A#ÂM–ç û1O+îÇ×ïß xØMóÄ!­òãó3Ƽ¸€l¶@/Üä‰>•êU‹pƒ'÷NÚ>×÷}U†Šaü~Zœ¾×+Üàé‡pƒ¥ÃãÊ×þ Û‚àã GÌ 7©˜”þpßd¸-†g-Ce" " "Ðn:V¸Y¯{Røü¨ýØ®51¹waøîüóÂ}=KzÔTP'¶Îx^gݪŽ€½óŠ)qYäÓz›!Ê`^¨±m¶¬%(±å©G¸A°A¨È2:ž7†Ž?³HaY ḩ{pÒâÑÀC3Æ{fÊÈ2šyÛYM¸á?ñb0ö…à`i¹tÜ™I%+þM%áÆò¥åe}§kÞ9<°C†·ø•¬áÆê”u,&ÜTâË9à\ðv›˜&˜nx°§cíã°~¿Þ®$ÜÔr®|™YŸënh§ ƒÊë Ú~¬Ãæ…/Èð–™ÙÅòì/%‚ø^À .¶jûÎ*Qƒ:‘?5α;‚òré-{³ †k°’™p»–«ñ±¯^¸!ö† ¿$¾‰y=¥ûöÃÝŠ 7EËN÷eßM´hT¸i”Q#m{‘ÍÊTíüØqû¥1hD¸ÁÃOŸÔìzË»6²„Ï"-/ï;CvMØ0áÆ_ãi>fÅcHkV½Š7x Qwî³Ô7Ïš%Üðr߬Ò5Ä5ˆ‡y!Ó 7Þ'&|í1‰ìyGÂ'£Ï" " íD #…ZV¶bxoiF© †½³(ô{?~_šYêÑÅÏT|()Z^;¦k•pSíØëm(³á†|xEd‰$6ÔÄòŸõ…N%1|½Yz:B H‡\:º,+ 7¼M5wsö…h“å.oåÓd|þ-¥ ‘?{-fm«$ÜððÍÌÞÜ[>[R.¢ ÁŒmªTÐxáÂ…ÑS‚7¡ˆ-Ô—r9Æz„›JÇbÂMÖqZ}y£Š×Ï£¥`šˆ@X5ᆡ7(4fHþÐfÒ)Æ-I­çÊòå-ûS¸!)AB}§Žódž4Õ:¾Y ñeæ[Öz:Þ¼éæ´)Ú}:†F|¬$ÀáÕШX|ÓM_ae!èÒ¾iÇœ™m&ÜÔ" 4"Üp2T +*ÜxâÑök1-neÔHôyyÁ½§3í"Üx–GÞMîË ½e;íÎf¥êoáæ€ˆÃ=c~¹Ž¸†O8'X«…›<ï6 7þl鳈€ˆÀ`&Б 'dù®±a—¥6o±mMç紅׆½rOx¾÷ÿ\­©€Ax …óÂA´©×ênˆï€ÀnêÄ_¡óég÷0!†Ø/:¼õ#Ø'F9>^ëk˜™†‡ØtJj¶û©Âó„Þ.šg'mòÞ)“ý™H„èÁ~o•„‚¢2{ö™Ï|f /"_Žÿlâõ"v‰7ÚMœªU¸©v,&Üt3kÈ  ç #޽}­&Üøú×ò¹ÖsU¤ìz„ª*ÅÛif.Âè4Wóâ«Ä€¶ÃÛü<0u+ ¨TžßÆ0k?t4²ˆ@â…\sÄ„ÊhdïLV:Žz†J!P2ü CX¬æ5º?&Z4*ÜPd#ŒÈ_d¸íÁDF?\!1 uC€ã¼ÕbÆ ïÜøv“7x3=n<‹4ðp‘ãêoáÆÄ[êÂŒnˆF6¥8ëˆ#Dœ,¬YÂM3‡JÉã&žý¤:V¸á|¬Õ½RxûˆíÆÃé±mV.MIDATV-tzîZüX8eáÕá¡ÒÌRl­nk¾uDjÂM#|ën²‚ÚÂ⛥©z™j›¸~ªcn:Å*:8¶ÙyÒ!;^˜Áó—ôéÓ§Ç€ÅLNçÌ,K¸¡ÓŠ7 ûaxC²xx¯d>>C™è¼x«$Üp6;oœyèO†ˆaÀ˜›E 
wzêè½Z,SÂÚ°¢T¸ÁÛÁ†/¥ìÈ_íX¬ãMZfÃbZo”mH6¡þnê9W¾®yŸën謚ÇXâ;¤V)8±:%O­Á‰}P^fÕ¡#—oáixÃÆfûJÓùïÌþÄ9ÄüÐ,Ÿ¦–Ï>ˆržG‹‰”ë…xQ5Ëû‡¶{Di¸æ‡Jñ½œ¸B`Õzƒã)G¬œØ Qìcv8b‘Æ‚w³ÞD‹f72j¤ zv õÃs,5ŽÜ·VdÃ%Ǻ<ñÅGk•pà u×]7¾HxûÛß¾Ä===6ÿ½YÂMÖ0*IÛ¸ÕÁ{µUn.ºè¢(ð[>[Z{æ·Y3‹ÏÕhpb 7FTKÁH £…›±]#ãtàxݬÝ=±âùy°ç©€· ÓÏí]r‡Š™ÙÆþn,¨3âŒÅµás#Þ5Yhën(ëÁŒKÞPo½õÖ1 (…1Þæò@i– 7¬'Ç 'œÝÔùÎTÇË5Ã3±ÄŒ™ª…0p)¢ ß9V3D&„Ä"Ó_sÍ5Qüµ¼,Ó}ç K>}fŠsÚ#– ¬ƒ3A ±”c3…›F5ÒýôÙ é¤ÃÏu`†°… Jœ7êjâ›Åhb=Þ–é=ïBî[XÊ×¹<ѧž7ì/HÚ7æ=‹âŠ×þð[xŒá-i×\£Â ×§Ý'­\ö3a„²ðǰXïÊv®K^lðû†Ùµ¿”þøX5ÌêE½SËnì¸HÏuÏo7Úy‘éÀ%Üxjú," "0Øt´pÃÉXºkTX¥kù°Ëð©aÛaë„Q]Ãûœ£ù½‹Âµ‹ÿZ4-Ìì}>¼Ô;¿ÏöNüÒÂMñí§ö6!çÔ±ùOëá[pÃ<ÿ™2:Ëx(O§»æÁ“ΘyÜX>‚ÙòÐI‡˜2¶copñ*àûšk®YîÈ‘Ùex çÍe*ܰocÃö•µô1^lØRVýÉ[M¸Apá¡Øf‚"_øÎð:Ùö6ß{ƤéÉK:¸¥Â i­¾|ƬÃÄgÛ–w,&Ü0Õu.Èç ¡ìØR,?µy³…›zÏ•¯gÞçz„Êòx†°1å6"mu÷Ýwˆ f0÷ÓÓ¡Å»ÅÚCј™j¶ÜrË8Å®m{lÆŒðñ’8ã!:Ä[Áðê@x`¨‚ÂŽ;îX¼ç ×É)§œRîäQWÄAbI‘á‹&(r,#í IèÀ’§š!îÑIÇ8þK/½4:¥1dÐD?+Ç 7tü¾XbÆÑ‘|/ ç0K½|§’4på˜ÈËL<þz«E¸!fXãÁ5ǹ㚅=p0ÃðZ0±†ïö¹72j¤ rìÜ;lêmD™Ë/¿< ‚ˆóœ_šÓ!Q~˜!¢ Iˆ ´Áw•¦6Ñ^­n`A=þ…!Ô_qÅñçûƒ«Nž·¥bœ é5¿ö‡ö’œØ{NÑŽ‰Ãlo6Ë’ +…àÊ5‹ˆƒgÞwÖÙž 7'NŒCÙFý`¸î)Û3+?õ¸!÷8+Ÿkˆë/ZÚ8mÝ#êägãRŒˆËD@D@:@Ç 7œ¤áaXX±{\Œ{3¥{ŰB)þ ö\)ŽÍôžgb<›gz^‹ÂÿKtèŸþn¦•†šyï¼noê Bœ‡¿á†À‰¼mÃ+ÅŽŸòy€d˜oüùì΃ÄIgƒA€!–5U6ëé°Ñ`Hû@ä1o•T¸áílµ™n(³)Éétš§ÂJž/xŸ¤Æþͳ‡r:i©!Ú0+Ê”)Sút@©/Ö !á³7¼kžcËl£@ܼ˜è$¥q€ðž÷¼§Ü)'ÃhŠ‹ 7xÑ%0°=ÈSÎܹsc›–7‹† Òœ¯F¬žsUtõ 7ð ½Zg/Ý玧¬N[â¾ÐiÌ3:S 3O&ŸÎwøüzûÌþi«6c뙮ش´~‰¸Al)ó0Cˆ´ ¾¤;ûì³skûrølCPÒõö¿uþ½pÃv<†ð&ð‹åcI›âúå<¤Â Û÷ÙgŸ%<XoÆuÉý«E¸!=q· òêFĈtfºf 7ì£QF´AÚ=÷רOjÜÞÌ4 Úmâ–­·¥o­nØ7íë1¯^¤ášb8-çÖ¬Qá†ád\ãÞˆÙfÁò¹.lX¦OcŸ í÷5nHó‡ÒïTzò×pžpC^âÐñ[V©ßxã}îä“p™ˆ€ˆ@'¨îÐÖ6ɾÆå½‹Ÿ=¥CÉìÁªZ Q<+°"ÓgyטN;xÜ Ü0ôãáÏ„é¥4lk†ÑÂK€Ž-oÔSCä8¹ôf˜¡S /âÁ¶ó3!–T b\t?(¸¼ûáX•òâ=Ä1Moeáù„¸C`Kê]äX¼pC`LlréÍ3/†ðöu°[½Â Ç :6œ?opaxCH˜v:K¸!=ù>[ò$³·ùVébÈp,„”,C@CØ|óÍût¬È‹Á0†™3g.‘•˜D ¯J;st‚È[vóô²Ì6®é¼:Yz¿D¸Ã;È ~´?<ðÀÃ;ôÐC—+·Új«(pzï4ÒòöŸ3e |Á;íü’ŽN/CÒ¼Ø ¦8&~”‰Ãx+qŒµuƒ…y–X^q/BÔKÍ„êO`f3n Â^MX³<¶l”Q#mß5Ú  ˳:Ñ–h¿ÄÂGjxs0L¯2/ ˆ{ÇØÑiÇf6TŠßѬ٬FËñxáÃò²¤ ”`Ü¿½ Ä:ŽÏNó7¿¿ÆºpSòJ=fLÊfAú}~®Q†h¥íœýãÁƒ—š›Ž;î¸ø%M㇧á1‹çšLD@D@ÚÀn€ÿõч„zçÅó0®ktøò¼ÓÛíœô{}Š 7ûî»o|¸:ìŠÕJ>KÝëeÞ5 •¦v¯½müЩŠÔ°±^njØUMIyàÅ»õÖ[cç‹h O}z¸ð 
kÎ_k†ý÷ß?Lž<9<ýôÓáôÓO/g_i¥•ÂÑGzzz‰'žfÍšUÞ¦" " " " " " Í! áFÂMsZ’J)Là;ßùNXguBooo8øàƒ 糄ûÛßÂÈ‘#óÏ>Ž<òH[ÝoËßþö·a¹å– óæÍ ‡~xy?ßüæ7Ãú믿ßzë­áßøFy›>ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€4‡ÀnÞ;rçðBï¼Ho\×èðûW…®®®Ø‰nÒö/åu¯{]¬ä“O>Y¨²»í¶[ØyçcÚïÿûáÅ_,”¯?µcŠo§75l°A<ì[n¹% ä±ÕW_=¼ï}ï‹IÏ9眀èÓvÜq_Æ-î¸ãŽpæ™g¶C•TCF¸é*쥻F…ÉÝãûœö=³ÂK½óCoŸµý¥Váæ _øBØrË-#”O~ò“áÿû߀jÇ:…Ò) •zå•WÂÏþóðÜsÏB€ø±},¦=÷Üsßþô§Bùú;‘‰5>úhøÔ§>Õß»Sù" " " " " "PˆÀn–ëVêZ6L¶JX{ØÄ0¾kégVïKáÁÅO…i‹g†§{ç„Ù½/‚6ØI¸Ø3Ø)ÂM½%ÜÔKNùD@D@D@D@D@†"ŽnVê^6lÜ=9ì5|ã%¼mì„ãusÉ¢;Â=3ÂÓ=sluÇ.û[¸5jTXc5²Ë.½süñÂ,W^yå°Új«…iÓ¦…^x!7_+=nˆ'3uêÔãå¾ûî+<¬n̘1aíµ×K-µT¸çž{b~¨¨p³Â +Äü Mc¿Æšã†cš2eJX~ùåÃÝwß]qø[^Œ›X™:þÔ#ÜLœ81¶)X<üðÃe–Õv?nܸxîf̘fΜY1y-7 ¯\wÝu瘶J i™ˆ€ˆ€ˆ€ˆ€ˆ€ôŽnü:¾{™°ÕRk…wØ! Ýù-=áÏ ÿnxå¡0«çÅ÷¦b†A¼±¨pó£ý(Š(y‡úÑ~4xQf•UV Ÿþô§1L¼-Z´(\tÑE¹Cb–^zéð•¯|%æ6lX9ëâŋõ×^~üã—Å’Zë´ÕV[…gžy&<òÈ#år‹~`ö¦<0 >¼œ…v5wîÜ84è†n(¯÷BôÅ/~q vÌÊDŒ ~ðƒƒo¾ùæq¶&„/owÞygøáN:餺ƒsî?÷¹Ïež£{ï½7Æ©á|yËnöÙgŸr¬šN8!\wÝu>ÛŸ‰kCž<;ãŒ3Â)§œÒgó;ßùΰß~ûõ9$@„ùÞ÷¾{ì±>éíËÇ?þñ°í¶ÛöÉǹcÔ×¾öµ²(¸ÖZkÅr,_º|衇"/[OzDC/Ä3„›¿þõ¯-™åËö©¥ˆ€ˆ€ˆ€ˆ€ˆÀРбÂÍð®aa½îIáÃ#÷(ªvJ:õË—…ûzž‹zWK>h·n~ö³Ÿ<`òŒÎ±uœ—Yf™((àm“ggŸ}vøË_þÒg󪫮;Îx€ä±S;°Zêô­o}+¬·Þz1ßÿøÇpÞyçÅÏEþ °à5”gˆÿøÇ?ú$AäùÕ¯~ðôÈ2Ä(¦ÍFÜ¡ŒtV)¼g¼(àË™={v;vl$jUŠ:ýò—¿ •Î^NŸÿüçãÔß¶ß<áæ€ï~÷»c2ê|õÕW[–Ì%çpï½÷ÎÜÆÊ´} ÚtÐA¹éçÏŸŽ:ê¨>ÞBÝÝÝqjrùä²HEZ½øÐ,áÆŽ£HŒ›SO=µ\•tÊpïy… uÙe—Åâ½—Þ1O=õ”í6ògHÂçì3ŸùLy*ŸApã¼cÄØùìg??ÛŸ=öØ£,ÞÀñßÿþ·mÒRD@D@D@D@D@"бÂÍ¥¤Žu`ÕØ6)=bÝ|cþ9ážÒLSjÍn¼@‚JV VëP# ò˜h@\†î°žá+E§/"Üpþð´xþùçcl“fOêX…yÑáéC‰ëó†f?>Š7$â˜ýP)b¶Có‚P\ñÚŸý÷ß¿,|Õ"ÜŠÁCÅ JV>ž0ozÓ›âW†´Y\ V 7 éBüÀì¾ýíoÇÏþâ×o~ó›¸ ¯6Ì·E¶§CÙb¢œ?•„¿?†“Á*™ˆ€ˆ€ˆ€ˆ€ˆ€ô7Žn˜úû¸Qùñ1*=~þYqŠðJió¶f 7x³0 ûÝï~—‰†Ø&/‡@Ä ÑÁ|G!ƒa:ÿùÏÂÿûߊÃMŠ 7™•©a%^2l°AôÒ`˜¢Ao'MšKñâÀW¿úÕ°ÑFÅõy ÍË(nþô§?Åø5YC°b¡¥?x‹0œ «E¸AHb(T¥²c¡Z-ܼñoŒ±d¨ “f²Œ`ÇØ“O>ƒ9óÙÇÝá;m‰¡V‰õYVI¸!=b– ccÝí·ßÛ*A™Ó€ÎYåkˆ€ˆ€ˆ€ˆ€ˆ€ÔC c…›u‡M Ÿ¹O×õj¼¢p^辿àÂpÿâ'Šftéš)Ü×ãôÓO¯°5tÖYgÅxXÏtÊ {±Ø+>-ž t¸‰UóòË/ûMqfŸ-·Ü2®chVQO>…Tø²ñÆG1™¼@Ád÷ ŸñÌ`˜” ËÚ… óñ û`V%ŒBÌØ•g»§¨pƒøÄ9ª•µÏV 7œOâîµt˜ÝqÇ6Ùd“%²“f®Êš¼šp³Î:ëÄ886TÍïà‰'žˆÞ=^x¡_­Ï" " " " " "Ð0ÎnJž“‡Mï±}ØtØê5Aºmñ£á¯ 
¯3?J=öšò–ÄÍn¼(ÀñÏ›7/±S–Zj©¸áÁÏðDœ}ó›ß=YR¡d^I´ùX)P-q_ÌúÓã&õÚ`Ÿˆ1xY,\¸0z®XënÐL]/ÜxŽÕÄ•Z…êjâš÷N1–Õ–­nˆ³Í6ÛÄjÁæY†÷ Õˆ›tä‘GöIÂ9†­!¶¤B Ü:vË-·ôÉSM¸!1^Ko}ë[Á“³ÄF<ƈçÄ>d" " " " " "Ð )Ü”Èài³ÝRë†#FìT§?,üw¸æ•ûž7jÍn`dÃpÒÀÃõðCdÀÛåÀŒÃŽL ¹ÿþûË3=Qn 7ùóŸÃè’'vî¹çFÏ„80ïżpÓÎC¥l8[ê¢ÊŸV 7ž¯Í&V¥Š7¯¾úêaŸ}ö‰b "–5d¬ˆpãwÄ _ ›c†,¦µ7c(œ•eë´z t¬pÕ»'„‡o¶]jB|®}åp΢›Â£=%o›¶F„“x ‚Ç2¥2^L‹Ü¬xLGÎ4ãXÚÑöÂMVúT°†/>xpÞP¤/ùË1è1ÅzáÆ'F(añÔ|ùÞã†t>81³>y#+g¿ýö‹Ó³ó=¯~–Ö/™ëõ¯}\Å0,›Òݧ!–³#!–4šò±þn.ºè¢X¾¯‡gtýõ×G.~{½Ÿ9®?—â™(—Æ!2±…X86yÑ}í»ï¾á½ï}oLžŠŒEËP:È"ÐÑÂ͈®¥Â”îÃþÃ7 [ [3ëøËënZüp8oÑ-azÏ3aaï+åõø¡VáæÓŸþtØn»í" ŸÆØ¼ûÝïŽAaùž58ë¿ô¥/EO>3 3c„‚ÿû¿ÿcU¶òÃþ0~ölXPêÍS­N”Á°bÍ0%4A‘á+~Úç9sæ €ku³‚Œö…yáfýõ×1PXŸ58Â…ó¦GH`È–58BÖ‰'žXúS‹pã§lñÅÃûßÿþXǸ³Ò†!ÐàE'êbÂQ³…ÚçcæªtZnÖŸvÚi‘1uÉš|µÕV ßùÎwâß0ÆÛ #8öˆ#âLbŸøÄ'ú#Ûò“Ÿ”½cŽ8∠3n²¼’˜N=θUªÏŸKAŠ/¹ä˗̆ð†Ý~ûm¥iì¿?눀ˆ€ˆ€ˆ€ˆ€4J £…àŒ K…•»—[,µfØjØZar÷ø>ÌfôÌ 7,~(ÜôÊÃáñžçÂÐÙ¢ _«pã=Ið|¹øâ‹ÃwÞgÕaZo:û'Ÿ|ryf)¼9®¸âŠpÛm·ÅX.‡|pXuòäÈÝ 0Äu!P¬‰ ÌDp×ûî»/¬½öÚácûXy&ª+¯¼2ŠvòªÕ‰t¿øÅ/Âĉc–,Ï++]úÙƒˆƒBq¶ß~ûÀtÜ>fŠn(ç»ßýn¬;Ÿ‰÷C½9®©S§†Ýwß= ¾˜¥7ˆ';6s‚Ó?ÿùÏ0mÚ´¬w—]v‰S§[þZ„êÌÔåÌJ…!Ê\~ùåQ0Ûzë­Ã®»îZŽÙ’zŒ4[¸áœpn00ëŒ`HùiÏÁ®¾úê˜nîܹ1þ ñì<ø)Ùòã—ÛÚc3f„SN=5Üxã/„Žc¶)¦ôövj)­ ¥ºûî»Ã¥—^¦OŸEFbå aÔ™@Òœ[8nµÕV±,;o_ÿú×cÛ÷e볈€ˆ€ˆ€ˆ€ˆ€ÔK ã…3¾ké°\ר°b÷2qÉúÙ½sÃ3=/Æå¬Þ—,iÇ/knèh[¼Äp›bA€4Öñµ4~IœÀ­xY˜½ãïˆÁ^í{Ö¯<4fÍšUÞ\¤Næ­C¦<ÏŽrÜêòGüÃT¸áø_lzôr¦×>ÀÁñ"nHB<;N¹mûõH¶.]2KCṏtì±Ç–E[ï—œ¶›o¾Ù¯~œm@¸1ï hÛ²– #dèžLD@D@D@D@D@šE`È7lê°Uìc\N[<³Ï÷¡ðÅDˆ¢Æð†‹X^òyá†ïˆ {Yyå•ùZ6 ¼F~þóŸ÷,31¬%íÈ3‹^;t²ñìI­ZŽ>úè°Ûn»Eäûßÿ~¸îºëÒ"r¿3d‰˜%^ˆ¢Ãç"Þ@˜÷ö°Â•æÖ²í™gž 'œpB¦´Þzëe 7¤3ŽqãÆñµlwÝuWà8ðnÂˉòŽ:ê¨òö"8ŸûÜçâ¹òéaýàƒ–†øã ùmQsÌîuø»ÞUÞäãºp\µðe¨u'6’‰T©pÃŽª¶çž{ö9¬§./Mqîg'c=Æp7¦§ZÙ¬çü1m7ÇèE@¶a£F ÇsLØb‹-ÊûK¿ƒKÞcü÷킼¯¦þ6¥;ëd" " " " " "Ð CN¸9iÌûË3F1óÔQ/ÿ¶U&¾Ô"ÜØâ­1vìØ8E³ k±m¶Sš‘i5ÖˆB 93gÎŒ"…mÏ[2tŠ!)Ħa¸Ž?’—‡õ•êÄ6D‰z&3å3âý‡~¸ÐqX]¢ò…p…(Â0±Z hriˆÃÓ>Vï1dí‘ÂÎ^"xò „!€ P!°àA”gˆ0“&MŠâ mãå’pSĘvÿ>úh¦h˜WâçmöìÙKÄÉ!™‰o ñjæ¹É«“Ö‹€ˆ€ˆ€ˆ€ˆ€ MCN¸9uìGûœéÃæþ¬Ï÷¡ð¥áf(ðÑ1Š€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ@»p#á¦]Ú¢ê!" 
" " " " " "p#á&iú*" " " " " " "Ð.$ÜH¸i—¶¨zˆ€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€$$ÜH¸Iš„¾Š€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€´  7nÚ¥-ª" " " " " " "   7n’&¡¯" " " " " " " íB@„›vi‹ª‡ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ@B@„›¤I諈€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ@»p#á¦]Ú¢ê!" " " " " " "p#á&iú*" " " " " " "Ð.$ÜH¸i—¶¨zˆ€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€$$ÜH¸Iš„¾Š€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€´  7nÚ¥-ª" " " " " " "   7n’&¡¯" " " " " " " íB@„›vi‹ª‡ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ@B@„›¤I諈€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ@»p#á¦]Ú¢ê!" " " " " " "p#á&iú*" " " " " " "Ð.$ÜH¸i—¶¨zˆ€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€$$ÜH¸Iš„¾Š€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€´  7nÚ¥-ª" " " " " " "   7n’&¡¯" " " " " " " íB@„›vi‹ª‡ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ@B@„›¤I諈€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ@»p#á¦]Ú¢ê!" " " " " " "p#á&iú*" " " " " " "Ð.$ÜH¸)Ü×Xc°Í6Û„u×]7æ9í´Ó½÷Þ[8¿,ý÷ß?Lž<9<ýôÓáôÓOØÊhïM!Ð)çt³Í6 ›o¾ylŸóæÍ ?úÑÂüùóÃJ+­Ž>úèÐÓÓN<ñÄ0kÖ¬2·}èCaøðáá¶Ûn W_}uy}}8à€Ân»î.½ì²pá…ö×nšV.<¹_c'tRX¼xqÓÊnfA£FŠçº™ef•µÖZk…½÷Þ;n:õÔSû´¥Ávn³ŽOëD@D@D@:›€„ 7…Zø¾ûîÞûÞ÷öI{Ê)§„3Î8£Ï:}i_¿ýíoÃrË-è~øáí[QÕ¬0N8§Ç{lxÃÞÐç˜ei¾ùÍo†õ×_?n»õÖ[Ã7¾ñrº3Ï<3~¾çž{eô§! ýò—¿Œ»èíýì\M,\Q>ofT2ІЉ*4‘h¤);‰¤‰¥°a!!!,*vbaig!beŦÝv%ª‰¿¡H0bJ‹Jt¦¦¾ÓÜ—Á¼gžyósN»3ïÝsÏýλ÷½ûÍ='Hííítvvö”MF­»¯¯O'nÚÚÚèüüâ$šo0j È3P!"GA@A yH â&W{NEŽ—ôÎõŠÊ¼i³yí¥¯ï´wí£Á‹äõtHÏÌ7ÔÐÐÀFGGiaa!D›}Eyi5‡½‹|s–ÊÕF°Ó§V7 c0aGdKK‹©<,jüÇŠ¸1ê“x¹.Þˆ‡ÃA333„ݭصÙßßO{{{:\………466¦“7*\N¿ ÊÂCÄM”ªm«®Æ€7¶¹@A@b†@Ò7XàÑãxAµÎ×ô!í-¹µgw{üCŸüßèËß ò^ÿäED2‡N™%n°Û‹4H$·}Hòóó ‰Œ‘Sa{{Ûð–w¼Ø———SFF­­­ÑÕÕÕCÍèáQ±ziE"Ôªª*:==å~©ð»¼9‰]%%%¼XÙÙÙáú‘ê˜9_PPÀú÷÷÷y!tŸ]V,òsrrø×q„± 1µ• OÑ¢¢"þÞhþÜ3Xðáokk‹ŽŽŽ C—••Ÿ¹ÝnÆíððǾQ˜#ÊÊÊ(77—ï‡Hã"TïcÇH¨”ïò©ÓéäâqÄnܯO!V7Hš›——ÇcCå1j«Z´F"nàgø þÂ|ôTxܶ!^˜3`ˆ «Äèx·‚¸±r¼#Q2l‚Ü·C«««‹êëëù$PGa³ÌKnæ[ŒË }—–UÄ ô#'’g#L÷¶ Ù2žÙÙÙ<¯ ¬Ø¨à^E]ÌkÈ÷é¾QcÀè30ÚgQ¤¾í§\'‚€ æøÿÿ‰é§@IDATìÜEÕÆ'=!@b¨AzïUé"JG°aWì½~Š öÞ+v‘&J—.H¯¡÷Þ!´ô|÷?Ṝw2»wo{ë9ùå½÷îÎÌÎW)ü@ ô²—½,fý‘G)-Â/ù˰üòˆùêW¿®¹æšúý7½éMáu¯{]5jTý_|ðÁðío;<ðÀ=®ëÇ[l>ô¡…¥–ZJ—âçœ9s™gžþð‡?Ô¯¯±Æ1­ú…äËwÞ>ýéOׯ.³Ì2aóÍ7\pAxá…ê×}ùë_ÿÆŒzè¡ðãÿ8|ö³Ÿ ¤eåÚk¯y!Ÿ9yó›ß^óš×ÄtìýçŸ>üãÿ§žzª½Üðû!‡^ÿú×ÇpGqDXo½õÂÁFŽYKÁì·¿ýmýš¾p2Ìš5+7+Ë-·\,çØ±cãåc=6æQa¶ÝvÛðÞ÷¾7,½ôÒº?o¸á†ðï|'}ôѸ=öXxßûÞ×#LÙ%—\2|éK_ µq(Œ1¢”r\rÉ%á‡?üaX¸pñ!аÔ3mgذa=â=ú裶ÉgN¶ß~ûðÎw¾s±úœ?~¸è¢‹ÂO~ò“Å¢}ó›ß k­µVlC´ó~ô£a›m¶éñì3fDÜikEÒj)JÏÖé?øÁðñ~ó›ß„‹/¾XA+}þñ 
ãÇO=õTLW‘lÛxë[ßø½ÒJ+õ(ýâ+_ùJh4æ*M>ÛíïŸúÔ§ o{ÛÛÂsϽô^ûï}ï{±ßqÿ¶Ûn ŸûÜçø¥ýzÜsÏ=cúŒ§wß}÷‹O{écÇw ùÈGâêš:¯*”‰tÓ1ñŽ~2iÒ¤p衇Æä¾ño„+¯¼²žt•º¥}ýë_'NŒñ£4s:§?1–Y™7o^8í´ÓÏ(ð~×»ÞUO[áfÍœþsöÙm½I«•w‘m×EeW?&Ÿo~Ë[”ퟌ›¤…Üxãw—‹#à8Ž€#à4À°ÁJÜŒ62l2|µð¡±{„Ñá%Ŷ ¢¹a~øÉì³Âu î sÎ/ : ïU%n~ýë_‡e—]¶°¬vò‹BzÀ†={v8üðÃ{(F¡€ì)“Ë/¿<|ë[ߊAÖYg8y. Ï=÷Ô•ÑUW]5|ÿûß Jõa‡Vm±ëÿûß#…’7nܸÅÈ(E€ØtJ¥J¹¤§q‹~[Œ™§ ºwÒI'…?ÿùÏöRT^rÄ u Y!ÒB E^²îºëF"¤HÑž>}z€€°{â‰'"Á£¸eŸ+¯¼r$¾ ÈŠ$U’î»ßýnX}õÕõs±O”*ÈB«œh³Í6 ÿ÷ÿ×C©N#Ûö¦{z¤Âý÷ßh[9áþ—¿üå¡•Š­¿ô¿‹úH.¬®‰¸<œ;wîbä§ÂdíUòÊW¾2|ìc‹?!Gÿõ¯éVýs×]w øÀâï_ýêW‘„ yõ«_]“~ɵ½4Œ¾ô:¿-q³ï¾ûÖû-}Ù’-eÄ͆n롨Íò ˆ”iӦ岽Fb,€ƒø“ضñøã‡VXA·z|B BìPÏUĶ—Vú{q&äyêÔ©1Œ™„]°`AüÝ­þ^¥ÌŒÑÔ9ÑqòÉ'W‰ÃqÄÃ&›lZþöÛoä+컋ßUê–wÁ„ Å7,:0žk UûYÔ/6ÞxãHfµUÒ¸õÖ[#9I5ó$n«ï"Û®‹Ê®0<"…“Tl;LûpÖ;Ž€#à8Ž@1ƒ–¸yÙ° á€Ñ[‡G®[\úÌ çßNœ{yxdá3™»ƒãRUâ†p(ú¬Rn´ÑF±ð?øÁ¢rÎ&i¬&ÚUR&³X `±É +ŸJ=ÖRP‚þô§?Õ•èóÏ??ZY >< 0n¹å–õ{X{Ÿ{k¯½vLïk_ûZüd5ûG?úQüŽ¢Ïoä=ïyO}…—ßX#Ü{ï½|m("nk…sÏ=7®˜ï¼óÎa¯½öªç•ëÿýï ­d%€‚ÈÊñ…^'õ`y€pÜ«¯¾º·ì‹UäŸ|wÞy—]vÙ¹‡âÙ !‘’o-nX=†´¡.O<1üå/Q”€%ÎÏ~ö³:q…õÏ»å–[ŠÿNµòŒ[b‰zøfˆVø©cÅä§?ýi´fxÅ+^•uÈ D„AüQûcãAX€-–2YäiÅWŒAi›ZaçD˜È²eø¿ÿýo@Ù¦^x®¬—RòÊ*(¤EÚ´qÚ:éÒ&Dæpv¡%iµ(~ѧêT÷©[ð oô#, ,½è²k•¸AI¥OSwûï¿L›ö/ 2,ëxV™`9ч²‡2 ©å~R[!n&Ož­ÇT—X2¶Ðߨ',ðÆ*H×"ˬÈüi¤Ü+(V5§œrJ¸é¦›b¼¥f‰ +Dpb¼¨"íöw«0£¼cq9€‹HOÈ úØ ÝìïÊL¿ä} "°ª…–/X(!”å²Ë.‹ããÛ.»ìÉûüf‰Å¥]“¶?ÆÞI¿øÅ/êýŒ1ò¬³ÎŠ/c mNcÖ›Ö"1ƒ±E÷±|¢­2voÚªÚñqÇx/5ó´kàÒÌ»(óre·ãZQÛæ}¡•ŽÇÂÔ?GÀpG ƒ–¸Ycøäðá1{†ÉÃ_Z!«É£ ž ?žsf¸sA~›E•4ú{˜ªÄÊÁV ¶L (¤"Ft_$CÈ»}Š0R¦­• [5X½GÎ8ãŒÀö+Xì°Ãñ[—´Â®0ú )“ lµAYiÕâ†4!* ¬`z.¥%™•D‰ðà7Ïgroåo|c8ðÀã%&è˜ÈW‘T‘c; – VXÑßgŸ}â¥Ô2@J¾ˆH¹ŸBÚ¼H¼ä0fÛ“s…\Ûâ…Ú#”)¦Í7ÚŽ–›ÐÓ^Ž<òȨÐ`5y„P§"érV$„á>áÛÞ¬"’ÖaÙzvÔQGÅöB[FÉhãre¥× m@Û$R«µ‰fûˆM?÷]uÊ=ðxÿûß_'V¹†’‡åu`Á -­71¡Ú»Ê¦«ûU>©¶cæÚñ[!n çÖ\sÍøø”€ã¢%t!>©ó*R…¸b[‰Ú éÚíEd¹ç·ÛßsÄ VhäÉm[éfÏ•Ñ^c»›Hmú<äJUaë©È”! 
B¶6JZ!nxïñ.¤­Z±[Úî»ï¾º›Âl½õÖá3ŸùLü ùÁ-±ãJ®Ì¯X†!i\®5zjÜ!l³ï"›·¢²“.ïmÈ%½W¸&±Û¤®¸âŠú–)Ý÷OGÀpGÀ¨ŽÀ %nÖ±RøâØýðڿfdaXŽš}R¸é…›‰6 Âv’¸ÁçÉïÿûXþÜÄ“(÷ø”@ŠVåâÍä[ È ü–Xi4i%,ùÃçZeʦ“û® oªÀ+,Da|5|ò“ŸŒßmYÙ.#+‚xÓü¡\”ôåÏÃÜÎ~µŠ«ºò=’Ö·4m)ùL°Qð!£ðÙsÌ1áßÿþwšTt#-ȪÜ6V•Eœ5CÜh%–´‰ÒÓH ËäŸEší©°õ‹òAØa‰ñ†èyeþXåÇJAaºþúëãw«Äœ~úéõöo¾øÇZ¡èÈ×O7ûˆê”,¤–IÊ›íGXJ±U ¬Ä–gžy¦îGXèS¤a3Ä©ÒM QÛ6ж‚° ²"‚Jù²Ÿíö÷”¸¡ÞQ¤‘ÜxÊu‘ìÝèï¤_$–p#,„rcM.>V?ÿùÏã­Å ƬydYØ qƒuäUW]µXx/Aú"ò²h³‰Ë'pµ„°Ú!ee\ã~*ü믿~´JKɬ²w`»ï"Û®‹ÊN^­dº]Š÷âvÛm‹Ä÷œ_£´¼þÛpGÀpò jâæˆ±äKÝàꑳOtâÆ`Tfq³Ûn»E%™à(…$9ÑÖ!Lßå3# ‡ùþ”)SâÖ ”]¶PÈ™% ´V¯lÒª0­~Џa{JDN´ÊkýܼêU¯Š¾|Ÿ³`Q:8±Äâ©:¡µŠNb±VÈ «»¬ò"6m)ù(F(E[‘lšÂ!·Ú«p–Äj†¸±  Ûc°N‚lA9Î Ž-7Ùd“xKÛçráÒk˜êk XêˆÕ†ÅI( /‘¡…X%†<  æD³õG[7:ÙGÒgêy\Gù¥žr¢zÄWÅÛßþöd07l³¡ß!(Ãú/˜?`……í®*qZ…¸)RÜEˆ`eÚš,-öµÝþn‰¬e ÆV,or¢vÒþž{×㱂BÀ‡ñ«ßnÊY!Æ„k =´Í¶Yâ†v‚#ø±Â!ÇúÖ7˜žË'>¡du ™©‰D›B°Þñr³Röl÷]¤1¯¬ìä×Z”¥ 3ê3eïÑfËìáGÀp¡ŠÀ %nÖñ²ð±1{…e‡-ò“Qµ‚ŸZø|øÁœÓÃí/-qS6Ñ·Ž8mÚRòIÅe÷Ýw[mµU| õ˜ú•@™Â‡ä[r–&[l±E´š"¡f‰›øðÿ Àáh¿ýö‹§˜I¡³[›P@t*P3§ÎôöV)kYÑN±øä¾Û:­²UÊnY°7EmÕZo¥ý»?:'¶ÄÛu×]‰án­\ëK‹›Vú»%n8~tøá‡Ç¢ÓŸéKŒ»Vz«¿³-ÖžðVD$Ù¼}·[¥ŠÚ1qåg†ï"nHKþ‹Ê¶—.»­ï®»îªoÑLÕý.#nÚ}5CÜX?^Ú.¥þb·—•Åï9Ž€#à8Ž@9ƒ–¸¡Ø/¶LxͨMãe]EgÍ›N›wmí(ð¼FñÊýN7«m'e¾rØ0ÉfòŠ)‘:u´{ôˈ›ÜiG¹ç5sM[šÝ*Õ®CȲ’>#ý­:åzJ¬(¬ÅE­‰ˆ=±Å^'w)ŸÖš!Mß7E˜Ø´rß»qª”ê9ÝÒ”{~3פˆ¦éVQpÛÝ*ÕJ·Ä ~º íì©Qô+¬¹.éþŒÄyŸ©í9'Ös4Nu’¸!-|ç0Þ¾á oXìÔ©²²¨MQ8'Î9Ðç=ˆócÂ0¶[q“kí¾‹ª´k›ùqbK*ùQj‡”³éûwGÀpG`¨#0¨‰›áƇ•‡-‰›ÝG.²ì(ªð³ç߉›>,\PlP\ï$q :”‰kÎG¾¾ùÍoFEš­C_þò—#ŽR°ˆ‡óbkJͤß+“&MŠaˈ›ÔoNŒðâ¬38Õ‚íB¬üU•V‰ÒW\¾£°ál× “{*Íœjc‰Ò?MÜ•þa‡PæwzªŽ”|&Ö:™‰•Rr)QéÑç«%”ü±} 7ÝtS<&kœHKª7("¬ø#+<#mÛ°+ÙVI£Ýpdùd=hÙqàv›GÚVH ¥KuŸ7vû™=Y­þð _ºAÜØzÈùÖÐØB»Å’§7Ê®”ì¾ nÈ[³ý=GÜŽl¾§Ät·ú;ÏBðgÉ‚B§o´++H'wìµ% ÓIâÆŽ¹9ò›çA–bMˆYöèúœ…c’NS´ã`L¨öGãÑ;°w‘ÆKÆY½¯ôÜܧµ,¼ãŽ;šk®-4=ôÐ,!•Kï9Ž€#à8Ž@1ƒš¸¡ØÃkÇO®YÞ¬?bŰåÈ—‡u†¯–6:"2sáÜp낇•óïª"õPx´fi³ vø`—N7Ö ‚-"_|qTŒg̘÷ñ³÷]ÊŠfÿþm6Ø`ƒøÓ¦pÚÈŠ)Ûw8à€x”w¼Yû“#n줔xguV¸çž{ê“b&Ê:Ž›|53TÚ©b£üð)e!5·>˜ô^rÉ%áüóÏÊ V l-B¸‡ù¾,BâÅ’?)qC|ާ…lßwÞ¹¾ˆd¾÷½ïõ8åKJ¾%ng ~£Xa!iäð•ôð߀B\•¸4À)³üa^’E~Æê–”L² 
)«Ðøú_âí°Ãñ“¼sË"Iº=ƒ“]ˆ‹uÌŽ;îã*?©Â/%Fi¡(a̓eH"NŽY”#PZí#z^ѧêT÷±&:¯†xr„0Ûá°¸Aè8hÉÿøÇú‘ðXTAN@âl¶ÙfÑz‹8IJÜØc˜i‹a<rÖ’°ŠŸûìqÃ)7(ÄÚj‡BLÞðáD[çt ¨víµ×†£Ž:*—µÅ®õ5qÓl·ýD7 +9MÕíå—_ñR»ÑßIk,ütíðŒ3ÎÐcû„€¤VÆvê'Ú!cDã¢Þ3J«“Ä 8b £rñ>`ü¤m­´ÒJáàƒ +¯²J|tJ¾pYïGˆqüüð>d,c\©ž#îôž"ñÜ;°w‘Æ<ð¬BÜ0¦“•Eyâ>GÀpGÀhAOÜ¢q5²fÅaØa#Ãa‘22#Ì Ï/œž¬$5sáKÊŒâ ÖÏN7àÄq®ì©/“Ô ­XßHÁJã2yU^sÄÍ¿øÅ°é¦›öˆqƒ¹?b&ò›mZéäw™hBÜ qCº¬âã{¡LRE¸,,÷,qßS;Šç—:Ia¤ä§Ä ÷­/&ê`hS·lQ’²§4 ƒ5Ê«èU‰âCªxàJ*ûÉ6òb­Hˆ> z‘@ÔayÁ :V $°ô)js„½âŠ+b™l<«ÄÐ.q¤œ°ƒ¤½¦ÒJIÓH«NQ‚ñOd­ŸlXðàt«Ô oêÌׯá;N¡å <×^1G!+.ÖmU¤Ä Ï…|À×SY=c톟$Kd•å¹/‰›Vú{qCSrë÷¿ÿ}8å”SêÅïFÇÊq¦ªÐžóª $½¬Zrq &Û9¼Œ¼ àH$KîùôÁÜQçÞƒ¤…Eí9ÝJÕèHÜVßEvÌ++;ÏX¿u\c,„8uqGÀpö2Ä Pm?rí0£FÔÌ /Däî[ðd$nÚ‡q`¥ 2´Š Üìºë®1(>PÎsÂq=öXlòŠÀ?j§&A&¤‚Á®lÙ±2mÚ´¸ ,0·2QðÁ~0l¹å–õgÚ-ìñÇz²‚‡ÀUEÄ QþœhûKοá1¡ÇÚ(̃Çßj+“UW“õlKÜh‚Œ#_»Â‰rÀŠmê 4t"S‘ëø7]V ‹¦N·˜°Â˶DxYß.ŠSö ñÆv'­V+,V+Ô9–©²BÊŒ"ƒõ’UÎ!NhŸXQ¤$…Ò†¬ ­¦Çƒ–µžŠUbP@É3íÎ>BŒ­EÖZ)M§•>’¦aÛ:}­‚ V%V° aÛa‘e[h[òÿA\pÄjášk®‰}ŒkÖZŽßÛ ñåƒB,,š!ntL9䉵ŽZ”zÖbà;ßùN"NÛD°œ’eâñ‰µ¾ŽÒ±…zÆR «“ª¤ éý¹vÚØ¸šµ d–K‘ˆ`V¤àjûÏæ8ù*Òn·þjÞò–·Ä>kŸk×’wƹG}Ô‰äp§ú»-O‡üÈÌAãeÚ[’ gÕ¹D]C®¨±ýˆ¶-i§n•wlƒ’¥ ®ƒ-[Ky¥ø* D#ïÖtL¢}bqH?ÏI£w â´ò.ªÒ®•¾>7Ùd“ ›f‰7¥áŸŽ€#à8Ž€#G`H7ÿ¡ˆÂ¬Ú)äí3{:ú‹‡ÀŸf‰›f!aâŠEJ «œø‘h$+ë®»nŒÃv¶¢4#øÂa‚ ‘Às­ ¸Uݺaãuê;xc2EPñÕèyVñ¸‘U~)øiq•#:¥]tŸtÙb„@¦‰¨±áÁ—{„­[lÿjV0³‡b mÆ:L-K ò€m;lQÀÇDUAI¢lkÄ…è¡ åÄ7RÎÉ3„凼#ªÒJ©š6å¡/ñ UÛØ^ÑÃ)7i?*{>ÏÚxÕç•¥×É{¸jø©õV'ŸÕÉ´sï$NiZ´E¶MB3Þb5Ù›B?¢½ñ|Æ…²q%Í—Æ$Æ!¶B~T•²w ÒèÔ»H饟ö@¶¡â„ÚÅpGÀp:ƒÀ!nÖ±R8bì=P;tÆOzü*?ºMÜ {»œEŠ\7óaNB~±ÝÇ’jXi@j¨MaY€£Á&9âf°•ÑËÓ¿ðþÞ¿êÃsSޤ-§êñN€xº°/LÊsëwGÀp‡À!n½M8hÔÖõ:~Þåáø¹—Õ¥/R²«n•JØôç²ö…":ý‹ïXa`±À*2þTØ ­ZlmÀ$¿“?<³?ˆ7ý¡†V¼¿­ú¨¥eKâFm·Žbu‡¤þìjÙ<ߎ€#à8Ž@B`È7Ú&%ð¸ ñä ááŸý¾RäØÂ„ÿ›ÔE g¨U¶ÅÙxå»7¥¦O>½¿žºÌ%Áwƒ('V6àÖ6ƒ¹Ö½lŽ€#à8}À nRk€ªÛ¤(»&YnqGpÄ«#o9Å©ÈÑe7J„ü!‡} à¿h\Í1ôÓ5_(øÀ÷Í™gžÙÇö›4q(»ùæ›×ˆ©µ“RªÝo2ïxÕ6ä2 ©¯/ü’ÝqÇÑ ³ÝN;äñ;Ž€#à8]B`H7CÙÚ†väÄM—z“'ë8Ž€#à8Ž€#à8Ž€#àtAOܸµÍâ-Ɖ›Å1ñ+Ž€#à8Ž€#à8Ž€#à8Ž@D`P7¹“¤†ºµ Љ›þØ=OŽ€#à8Ž€#à8Ž€#à8ŽÀâ Zâ&GÚPü¡ìÛFÕïÄðOGÀpGÀpGÀpG #0(‰›"ÒÆ­m5F'núw§ôÜ9Ž€#à8Ž€#à8Ž€#à8B`P76Ú&𙊓6/!âÄÍKXø7GÀpGÀpGÀpG ?#0(ˆ›2Âð´éٸ鉇ÿrGÀpGÀpGÀpú+–¸Y°9 î¤xéÓ‰›—°ðoŽ€#à8Ž€#à8Ž€#à8Ž@F`@7"kʈ}Ó Ö,m. 
|ºôDÀ‰›žxø/GÀpGÀpGÀpG ¿"ÐfÈ@v¦qSsâ¦1FÂpGÀpGÀpGÀèô[âÆ ›î5'nº‡­§ì8Ž€#à8Ž€#à8Ž€#àt~GÜ4CظuMkMÁ‰›ÖpóXŽ€#à8Ž€#à8Ž€#à8Ž@o#Яˆ›ƒFoS;Î{ëB 䝯}×BTé†7•`ò@Ž€#à8Ž€#à8Ž€#à8Ž@Ÿ#ЧÄÍÌÛÏ Ó/øA¡Œ´q˚ζ'n:‹§§æ8Ž€#à8Ž€#à8Ž€#àt >#næ=yWxæÒ_‡9O E¤6Ý©v'nºƒ«§ê8Ž€#à8Ž€#à8Ž€#àt>#n m o^÷äÙíQGÎ>Ñòîtm¿˜ž7]Ö“uGÀpGÀpGÀp:Œ@Ÿ7l‘šyÛÙ±(xvçÅŠä¤Íbtô‚7…ÓsGÀpGÀpGÀpº†@Ÿ7Oœú¹X ýçL ûÕþ[qÒÆ¢ÑïNÜtWOÕpGÀpGÀpGÀè4½NÜLÜéc…Ö6ÇÏ»<?÷²N—ÑÓKpâ&Ä:Ž€#à8Ž€#à8Ž€#à8ý>#nRk'mz¯…8qÓ{Xû“GÀpGÀpGÀpvèuâfÌ”b~Sß6NÜ´SÍÅuâ¦9¼<´#à8Ž€#à8Ž€#à8Ž€#ÐWô q“ZÛPøCgü¤¯0rÏuâfÈU¹ØpGÀpGÀpG`€"Ð/ˆ·¶éÝÖãÄMïâíOsGÀpGÀpGÀpZE Oˆ›t›Ô‘³N 7-x°Õ2x¼&pâ¦IÀIð)S¦„•W^9–檫® ,¨—l‹-¶Ç>úh¸ï¾ûê×ýËÐF`Ĉ±,\¸°#@Lœ81¶Áå–[.¦wà 7„'žx¢#iÅDÊútÅcóÍ7´«t¬áÚª«®&OžÆžyæ™põÕWÇbPÎ6Ú(LŸ>=\qÅ-ÏÚpà cš·Ýv[|nGà‰9Ž€#à8Ž€#Ðú„¸ùå3ÛǬ6:~ú6©ÔdI4CÜŒ5*ì¶Ûn1u”·³Ï>;¼ð •Ÿ¶Ã;„ñãÇÇð×^{mxä‘G*Çõ€Eà¯xE¸9餓Âܹsë8äCâ÷Ç<œ{î¹õëþe`"°ÔRKêrΪb饗Ûo¿ýb×ëj_P’×Yg°Ì2Ë„1cÆúýœ9s¢Ò á7cÆ ¼ò÷µ×^;l¶Ùf=ÂO›6-ÜtÓM=® ¶à¸í¶ÛÆbÝzë­áî»ïîXËútÇÒ !••VZ)†ºóÎ;Ãí·ß^ƒö·×^{ÕÃ^yå•ñ;ï®C¢HæÏŸN8á„øó€„A nîºë®ø½Èÿú믓bìc ìïÒó5×\3ðùßÿþž}öÙþ^$ÏŸ#à8Ž€#à4@ ×ˆ&ÿÆ KžVZ°dÌÖ˜…ÃÂøacÂÿæOôäßo·€@3Ä é}öÙ§þ”›o¾9\ýõõße_XUß}÷ÝëA nP\\ú2%ω›¾©ûTH &„Ù³g‡K/½ÔÞjú;Š(Ê$ëñÇ_eÁl°Øu@ÙÃ"‚±:'¤wÎ9çD'w¿èÚèÑ£Ãþûï_¿M:Ï=÷\K~øáúõÁøR㕯|e,„C'-FÊút·±„ÔÛe—]b›Õ³î¿ÿþHèwú©öÇõóÎ;/<öØc1ÈN;íô^âÂÌ™3ÃÓO?þûßÿÆû–¸¹üòË;J~½êU¯ Ë.»l$(ÿñÄçõ×?U1‡(\mµÕb1,Îýµ\ž/GÀpGÀhŒ@¯7ÃGÃÇ/FŽY:„1‹ˆ›asžÃçÎ ³ž8,˜;³qn=DGйŠõKJÜ`¥µFaR¿Â +Ôƒ:qS‡¢O¾”)yNÜôI•ôxè¾ûîÆŽ[HªôÜàÇÎ;ï·œ@ŽœvÚiõÐê“éu@Ú°eNòÔSOEÅz‰%–ˆJ5ä ‚Ï¿þõ¯h…£°>Q"eu™óÏ?¿Q”Asùå—»îºk,[q®¹æšŽ•­¬Owì!™„VYe•°Í6ÛÄ-Oöv#â"BŸ…K’¨í“mkÖ¬Y6ÙØþ6Þxã®l•:ðÀÃÈ‘#£%Ù)§œÒã¹ýéG3˜o¹å–a5ÖˆÙÇâŽ-f.Ž€#à8Ž€#0°è>q3lx¹ô”0fʆaìjÛ…Ñ“× 8È‚¹3ÂÜGo³ï½4ÌyxZ˜ÿlmåuáK~76´ý7÷í7”ê‚ .h¸å‰‰0«¤våÞ‰›¾meJž7}[7<]Êkj%ÓJΰ’ƒt½÷Þ{{Xïì·ß~qûÓ=÷Ü.»ì²I¿öµ¯­okL·£à„­,ÚöxÝu×…[n¹¥Gü²[mµUxùË_ƒ\tÑEáÁ‡ŽO3üúì±Ç±ìl c{X§¤¬Owêi:ö™Ü{þùçÃ’K.ZŒiD܈$!Ω§žZOúõ¯}|W`ióïÿ»~½Û_hÏ´{„-lXóôGióM7Ý4nw¤,à Þ.Ž€#à8Ž€#0°è*q3¬æìtä2«†ñëí]û¿h_{\3n>≠ùÔ0ÿéûjÜ“7E8uâz»Ä +ñÿùÏJ³²É&›„u×]·G˜FÄ “h”üi`&?oÞ¼ñý`› ÿ1¿g»‰Oîñ œ¡¦+º6lî;¾*ð‚µŽ3­sß\œf®‘.é+mV¤;-vòß-7àLûÂ?–©´ƒ#D[hO>ùd\µOÓïÖo¶(ðl„Õë´}=<°:£-7Ês'‰)Â34Ø|ðÁñ“­X:StÐAñ}ïÌ3ÏÔ­ú'åÀb©2Ô#Ö¾`1uêÔx 
«†ª~rÊú4‰Q/ôüŸÐ'QP›í;XÄ'W?à‚…ŸŒ+ͦM-9P•ðª:”õiž]EÀŽq²ÿ.Æ]‘¼`AyˆÃv#¤Œ¸±8¤$‰Úk³íªJùÊÂhK!a.¼ðÂP¶m¶F[¡ï3@°6#ô¬ÖxF³q›Åœír¸üÏþ³)ë¸fÊäaGÀpG ÷è*q3réÃ’îƯ¿w¥͸©¶2tÃÉ5Ë›‡*…÷@­!Ð.qÄýä“O. âÏB[+”Ë"â_8CEɵ‚CÅ‹/¾x1ÇŠVÀ&ùY}õÕëñqŽ©ÓHP¸pÌI%ÖúåG«lÁÉY‘'œ¹â°ÕÆç™(rø`hfn'Ô8ÂD@y°e‡¢\*ƒÅäu¯{]ÌsÙ–5LÚ%eJžƒfœkÛùe;N¬!W®wÜqzt,_«8‚ýÖ[o·)AðG‚€`Åœ¶–®ÖK,Ûš£r“Š[*œÂÅö!¶0Y[ê§è.¶)@^Ê¡ªâ¢ôѦíIJ&¶þ–ϪÖ7œÆ£vŒ‚‰Žð<µOÊ ç@\Šx¤b1±ãŽ;Æ8XÒpÚS*´0EˆÏ––Fb­xra¥,7Ó§I‡m#øR{SÚ”•:ÉYN¤}ßô?Æ„6¡€µn·ÝvqÜPÚܧ±õÄ:öÖý¢OÚ¦üû0æÜqÇõ ižšÊú4õ…¥ÄB›Ã?‘„:Ç÷ä˜È/ÚâëhÒ¤I»­‰>C»Âw c4„fâ&%IÈcs‘@îiëÒÎ;/ÚúG(/¼3°,AŠÚ£â†v Ñd…1Ked¼"ý´-bñH›ޱ’ö«þ¥{¶NÁˆ~ Fz°èAÕŒ4‹¹Å†r©¯7óLë8Ž€#à8ý ®7ÃF ã¦n&îô±¦J<ý‚„Y÷ü/,œ×s{S‰xàRZ%n *ä³bA'‚¤CÑ–ÈäeÉ7öD4~C°0y·Š’=™„É=m+–¸¹`ïÛï(3LÜH˜‡êI¢ø°]$¬¼ÚS{ŠÂéº-¯ÅS÷ígδ¶À€Ë‰'žhƒ×¿k;LJî”)y"0š!n„ J D†%7RâFaë™L¾áˆR¹çž{Ö•ž$ZT¤QŒ!*RåMeg¨œ( þž î¬ÐÖQü¤pÙ{|§Üç×|µ¾k]b¯ë;ñPàéCV0"FŸU‰p죸U?q^œ*Ÿ¹¸ÔÅ«_ýêx«j;јKkêsÍôiÆþ¢z!]”m¶dY±}«"6 ß©—W\q1RHáhëlç©‚qȧ¯ÔÒÉæ©•ñ ¨OóLÈ]ô/Ú¡HݤÄb¼YûCÿelÅò‡8"K¸‘ i©´ª7)IB{%ÿEb‰X|é=rì±ÇÖ£Xb0%fì{¢ ¢QyVrzlŸeÛ" µµÜ¸eë4÷~j…¸isˆcüÜ /•Û?GÀpG`à!Ð5âfä2+‡¥·xK·zñÄ,׬»kWý©¶eêÜm¿ÖZ%n8 Ëe{Tkš%)éL’±.Љ*)qc–¢(ÜSÛÒ©=g&ž|"Lª!o4é¶k=« ”o”w&Ó¬[¿¤OÚla• ¬p¬H‰Ô5VÜ ‡@‚ Ða™~8r„þK.¹$~oôÇNê K™È7eG9˜ZÛN¢úá~ê¤?7ä£x Ndh+N«8¦Ê%J(¸Ô íÇ*ž$n ñé" ê:B¨#”B„v…µyCÈÊ Â=\âÑfi3òóbI5ˆ'ÈNâODý ’¡‘Ð/Eª’?‰òÌïÜuHúeÑé;„­ºå«=¶m¨¿ÑWèÓ}¤jŸF‰†ŒÑE[ ÿ?íKÕYê8í{(ó7ÞxcÜbÅxC|+£Ü‡¤¢OR)ð)cãå¾k‹Ö)|š§fǃq“’6lÃÒFã'÷_óš×ÔýÒÐi'Œ´!Ú „„x–¸Ñu}V%nR’„z±1C¾Ø**k)êG¾YŠˆúéÞ{ïã’Oü¹ÐH +'ú×!×Ó~D»Ä:±cx®-ÒNÁˆ÷JÚÎÒ“ÂÒ:¥?³ @½“ÆÅª¤Ÿ0N?aŽ¥ Ž<Ûž*—¦ã¿GÀpG`à Ð5âfô ë†ewû\Q;IªyaÆ“á©s¾æ>VÝñe3é{ØP'¬Q„ $‡Ž‡¸a‰É;‚Æ„× æäL†™8£0 ²ÒФĶˆ0™…4A9¶‚r!ëY(¤këÇÃÆ×VÒO&Â±Š‰2-±Ä ÏåùÊ3ÄJ®„I?«½RO8á„N÷‹>ÓI=ŽJqXjÅúBya[š¤¿7(XgœqÆb I;8²Uc­µÖŠE‡”Áʺ” ´QGRâ;IÜX’"%x¾m;v›Õzë­Wß²”sDË–/¬9”f‰¶¸ÑÇÚQ¶dq•*”Ú¾˜^×óË>í‰Si›,‹§{Ö ‰¾$¢K÷«öi‘ÂijʶÒÁzN$mEÛ_¸oû¤ í‰O‰%A¨òIY%ÖŠåò¦]±y"­fÇ›gù­²mbK$ òj-Qrcá -©Û%nŠH’ø€ÚÕE>nŠˆ⯽öÚqÛßuZš%íSKÂ!v; ÎHÚÓ-f„I­ñ¬Õš­SÚ¤„O'¥qÓÉgyZŽ€#à8Ž€#Ð?èq3fÊFaÒÞßh©”Oœú¹xÊTK‘=RCdÑÑ qƒbÁÉ 38EY·b'Ìl#aâš#n xP0‘ܶ®Ûɾuzi'Ö(€(X© Àé¹LÈ™˜çDD÷,qc'õEÄUÒñ A‰Ô3™?ýôÓ³Q´Å$Uš”ßþ´UŠÈWIZ˜vp´ hîˆ`že …N7²(#)tBŽ}.¹!Þ2ß:1@ò§Ä ýRÖ–X…Üâ:b•Ô$ ÙŸŒX‘6í‘~ 
!ÐŒ4CÜõiž§z)kÿé$'Ù¶ïA§[=±RÐÖáa­©r[dšÁCamžZRâ†z™2b E=OKˆYaÖªóv‰K®äÆÊvˆÊd =¶Ê2.÷²¶„£mÚ4Ä Ö)ˆ}¿ðþ‚ã3kIhý£Ù:ÅJ'=½-M§•ßNÜ´‚šÇqGÀp6Ý%n^óu6ø7‡PmõÄiŸwâ¦9Ôš ÝqƒÕ ~?´í!]=—2‹u Žq-b-nØÚõŠfj¹£¡d ZIå»XçVÝ c'ÏÖZ‡{V¬oKÜ i; d•]™W|ò!‹ªV 6_9åQi[…Œ~dæß‰kÙ 2ðÙ޲)Û–g¿Z…gËMN ç>¢0˜òqƒÖ`õNýçǨ"–\ÃÙ5¤›DÛǰÐ`Ë a‹¤Ä ~@°N@ð¤vKŸßi§âuk/”üÁ¯ J±¬š®¹æš€R³Ò qSÔ§!_œ„òd ¬wÛ÷Ò-ˆÜ·cUÎÊŠ0"ºAÜ´2ØqÂúí)#«-ÉQf)¨öhÛ7¤R…DÐ;ƒ´,I¢´„k+7¤Á8ÀØ( H®ñ,mâw*"Óqþ_ì{'Ï–©m·Ý6^¶V=¶‘¤F²&MÓå7y‡ÔgìËIÌsñüš#à8Ž€#à \ºFÜŒZ~í°ì.Ÿ #—^ä ¢*DóŸ}8c¨‡k—¸Á/“Ä*Yv"‹oN¦±Ê%nìje•ú`Å“ ?b'ÖE„‰% ,ñ‘>Ëžb‰Y¼¤á‹~WUä줾hµ›gXË믿>à¨é¯ÄMÑŠ|«8²ZŽ2‡”)OÜ×–»T)Ó,qƒÓTùe"ý*‚’¥íÖÊÈÆEcû }#gí&EÙ¶u¿è;ä ŠÂÖ2ù ‘¯®“'Â!ºÎsŠSÂ¥~~ŠÈ Â6’fˆ›¢>ÿüv œÌ„ƒçœX2¼Ï>ûìÌö=¬†d‰£4ìXe ÝçSCÕþnãæ¾Û<µ2XâÆ¦_F܈mÔÎdñF»-êß<³ ‰ g¦}Ty®­7¤ƒß+Þ+;nêš>mI±²ï—"kPÒ±V£–H´uškgÄiÄ÷");Æ» æEéúuGÀpGÀ˜t¸°Yj³CÃk-òoRž™·Ÿž»æïµ#Á{ú;©ßÃ5F ]â†'H!gòϪ-“{ùEà;׸g•!KÜX…uÂæ„í X ðŸˆX)yXC ¬"(ª8µÍ‰Í‡%nDGV ¹øÊÖÄo$vRߊ¢¦|‘§þtªT‘b§ü‚K38ö%q“*€Eù¦îÕv!DÜPVÚ(Îlék„KÅ®Ðë^«ÄNëQ:U?É;–R9A)Åé«òn·*æÂ7ºÖ›Ä yF1F†*qCÙ‹¬£8L$^™õ•ún»ÄMIB>‘N7–Ø#Í¢w÷ìËt¶ï—n7,,฻LXp(z/:qS†œßsGÀp']#n†Æ®¼E˜¸ó'ð‘c*¡·pþœ0ýüï†Ù\ÎÉig¥È¨2 n¬ÏV¾ñ§€³^ÄúV("nìÄ™ø¬ W;±.šœã»stÄZ¥Ï°ÇÉZâF¦ý„·[NÒøÍþ¶ÄMÕ­8·d…‘2U¶}H«¹öä"âZ’JŽL¹ŽÈ:¥ê1ÏÄÑ–‹2Å®µJ_VV«¦«ù*“] 'ß»bn·JYÅß^W¼f?ñÕÄiRXÃð]’n3l•¸A×¶ miâòÙÁ÷Üu,™RU„e» ýByµý™û­H'ˆ›Nn•ÊYBرª/,nZlŸf,„¨ÂÇB¿<÷Üsã¶2[gÖPnËai/X²uÂÇëS’Dùj—¸¡ÏÒìV)Ò†P“TØ6¨÷`J^Ù÷K™µŸ%xm{±c|®¥yiå·7­ æqGÀp@׈`±äòaü:{Ô,oÞP ¥ç®ù[˜qëYá…ç¯Þµ†€&¬¹íiŠé©RòÁ„L”q‰‰;+žÊ Ž‹« Y‹›njª#–ü±ë"â†üÉIk‘c·Iñ8KÜprpÄ:yÚøc'õ9çÎJZM))¢“¶ÒëŠG}P/”¿?7íਭ”­çÄR‹”ZÅ+%hä«)%ƒ„s«Ÿ–ÈJOqSf Sö\”m9 ¶ 9mB>{ŠœH+]Ú –6l“Bšu°¬tÒÏN7¤©z)³8³Ä„Uœm߳וW;VYE\÷ùT›êÆV©VÆK܈ŒµXCzb©hO/³–)àHßJ}©p亜5Â¥‰`·­¦$‰Ò®­n•²Vgô+Ý”›-G”ÁŠHáœóqû~Áâ¥È91þmt„¼íWÚ™ÍG«ßaÞjºÏpGÀpú/]#n˜(¡Œœ°b·ÆÎaÉ ÷ ÃG/‘EbÁÜ™áùþfÝy~˜ÿÌCq’E\—î Р↜Y?5ªïTI¶Ê%nˆ/ß$Ä…4aeß æü(´¬&P¶;±."ng&à¬h³e Ò œ˜Z±ÄÍ2Ë,öÜsÏx¥'—öh`n`u´ñÆÇ0ýœëo$줞[©òε²ãÀ­¢–[½Æá3ŽŸ‘þ@Ü´ƒ£=r¥ÙãÀeDûâHu{œ;øXk«”¸¡îÉ;’ó퉆ÕʯŽlÇ10ñ _°v°Û§HËžè“:ÍV~ g}æð»ŠÐ¯sˆ­³^¶FYKœ4]YQq½ÈR)Så·%RK#âWíÓ69K:Æê€1ƒz·N³mßëÄ 8´3ˆ¸!m[å{:&CðA 
Ë¢Š¾…Å#´iC{.?IÄÇ¢­ÜoD"èxz9¬'N*í7Œå²2’?+|!i!!m'֢Zl‹«m‹ä3תžkgiÙ[ùÝóVÒô8Ž€#à8Ž€#пèqc‹=bü¤0jÙÕÃØU·£'¯FŒ_>Þ~aÆãa„Ù÷]æO¿'ÌwK [×¾wЏIOÐ!Ãé¶§2âÆ:qE™d‚Ía”M¶–h‹‡UríĺŒ¸!?²Pá{NXå–¿KÜÖZG'HòÇö”œØ"(6¬Z§ÄN¼™ü±Ê£â¢°$е0¢¬FX  8V=›çZÿG²@IÛCš¦Å‹{øá)Ú<ÎÇmþ‹Âr½SÄ õ™«q‹ ‡™3gFëÚ¤’n¶ïåj;Võ…Å y×fÆKäZâ†~Èö2$µj„¼Æ²JXÅ@æù ?1Æó½UâÆno³ýÄ<*~m•¸|bŒ =Ð!Piç” ÂHå?ï¼óêGØÛ sþÏìûEùdÜ`ìˆeܱíŒ÷‚~¾Q;Sší|:qÓz×pGÀ˜ô q4Æ ÃÇ/F-µtíÇ"El؈yaX˜f=ðXX¸ ìåÀ„µçºSÄ ¥´Ž.±jAµb•¡Ôâ†pÖªÂÆ³ßS‘vb݈¸aâÎ ¼]A&mòŠ… “},\”¸AÁ¦|<¯LR²ª,¬Ôã{AÛrqrlÉ«éòi’Æ£<ü§Ü©¢^¤ä‘†üÁtÚÇ i·ƒ#Ö+XYFN°†âŠZJܤ§"¥ñ ‹¤Ä × æÀ¬èÙ„! ¬±hOHJ6Å‹™?œ©c2pË-·´—bºiŸêÀüñ“n7‘õPÎr@ÑmŸÒµFŸe§Þ¤q;EÜî”)SÂ;ìPZ/é˜A<Û÷ú#qÓÊxPÖ§Sò1uVŒòÏéiê`„@r^vÙea½õÖ‹$e;Ä%I,ù¾èI/ým•¸QÛ&¥´|v+$d&¤ý”¶³âŠ+Ƈçü—Ù¾`X)·/}ã>N„­4jg6l«ß¸i9ç8Ž€#à \z¸aò‡4nʼnaÉ©‹^fD>jX¸ÿäEþP.Œ+çÍ7¬hâ{É vrŒÕ~+¬#éÄZá°@iMW™lst2«ßVPÈå¹Ì¡§Ã–H$”VOQni“œüƒbý(.yb¢i|ªÀc2‚Ò_UÒI=éb)# Òxa—´sBžÀK!›',~ðÇÃ6ê-%nìÖ¶TaqÓŒO;ÊÐŽ(XP‡VPš°H"&)qCXˆ¶×bE€¨Ã*)²À‡í3}6utO­½_}õÕuÒ&&TûC]¢PÓÏl½‚“”â¢6CûÀ™«¬yrd¨ž“~ÊoÇí·ßó¥ûrVë¿ c•U]kôi-<…•ÕáØò†…Œ•fû4ãõ¢í>J‹zÁÂ)‡¸oû:ÛØ )­Xe¸«FƒM¯Ê÷vǃ²>Íó­c`Ú$#ã–H`ÊÙzß@@IDATN[…<Òö¾ªý›>¦m¥`Ï$’eb:æ( ŸÂ•çëøv{ßZƒ{ì±ñ–%…Šœ« D ¿Ò.äK*õ<ÛX`l ýªO*D( H[]oÔήÏ2ÌÛI×ã:Ž€#à8Ž@ÿE ×ˆA0fÊFá•¿ë9q’¶ÆxK„ø÷~ÙNMx\GÀpGÀh>!n6úÌì0aE[ ÈðUŸ~(<=mv³y÷ð-"Ð_‰›‹SMÀ2K+XSp2R¦ôÛ8í~/RÔÚMw(Ç—‚Ù[u8”±ö²w¾°:Ä/ ¤6$[} p¬Ø“î¿ÿþhÙfïWù^…$©’N'ÃX Ñ¢-®EÄM'óái9Ž€#à8Ž€#Ð,ý‚¸™~ý¬põgn6ï¾E† qƒúTX/° K&çÖ·ŽVÙ–Õmé E­Ûeêëô¸éëðç·Š@_öølȬõ![°,”ÓoîsÎ9ѲÙ2Z’¤h›l³i¶¾Ì'ÒvâFHø§#à8Ž€#àô'ú„¸™°î a£O÷´°ñíR½×,† qÊ2Î^9.¶HPL°Æá8ÜÞ¾RÔz£l}õ 'nú yn»ôÕxÀV<|?iû_®lÂ[üZ{$w3ެ[yVÕ8ø´‚°g«$[csâÄM¿æ8Ž€#à8}@Ÿ7:õsƒÅ –7.ÝG`¨7B¿=΄\ÇÛâ ?)÷ÔV¦Û¯Ÿø–3dœx¦ŽB»ñÌÁž&Û:ð„‚ÉöG`  Ð×ãÛE±ŒÁ¿dd –78n†ÌÆJ±U”š€vÜqÇxíµ7ïâ8Ž€#à8Ž@_#ÐëÄÍ’îæ=yWXußyµÿ/M §_?»¶]꡾ÆcH<¨7C¢R½Ž€#à8Ž€#à8Ž€#à8ƒ^'nVØÿÇá™KrÛ¥îþËôpן[3Ë”µÓ¥B9qÓ%`=YGÀpGÀpGÀpG Ãô:q³Ò»N‰Ä V7ééR”Í·Lu¸†3É9q“Å/9Ž€#à8Ž€#à8Ž€#à8ý>!n m°ºAR_7\sËPèž8qÓ=l=eGÀpGÀpGÀpG “ô qC n pÆMº=lñí+““7‹AÒ± NÜt JOÈpGÀpGÀpGÀè*}FÜÈêfÎÃÓÂËß<1¬þ¦‰Ù‚:“…¥­‹NÜ´ŸGvGÀpGÀpGÀpz >#n(!äÍc'}8¶Œ¼áÄ©§§ÍrÇÅjNÜtHOÆpGÀpGÀpGÀè2}JÜP¶óÚzËÈÂñÓ§„HóŸNÜ4™ÇpGÀpGÀpGÀpú~EÜ€*ae‰Ãçôëg)º6@À‰›ùmGÀpGÀpGÀpG Ÿ Ð/‰aS•À!¼H·ÄzÅŸNÜcãwGÀpGÀpGÀpþ„@¿&nÎ2 
7«K¥ŸîиžàÄM9>~×pGÀpGÀpGÀè/ âÆ‚Õ ‰ãŽEî¥ïNܼ„…sGÀpGÀpGÀpú3ޏ±`Bâ EG‰sÏÉPè)NÜôÄÃ9Ž€#à8Ž€#à8Ž€#à8ýMÜXPËüá8yc‘ mo•Ze•UÂäÉ“Ã2Ë,ÆŽ† ÖóþËÀ,\¸0Ìž=;<ýôÓáÑG ÷ßÿ.gÝpGÀpGÀp:ƒ†¸QE88/¾ú3)ØþlÕâÂfuÖ ãÆÒøyᇳfÍ ·Þz«8C«Ú½´Ž€#à8Ž€#à8Ž@¿A`Ð7 ;qãqaóoMY d·¼YI+Ä͆nV_}õ˜À³K>æ­==Œ\}n5©fm3b1¨»záŠCžèjúž¸#€5Ù„ ÂÔ©Së`Ü}÷Ýá†n¨ÿö/Ž€#à8Ž€#à8Ž€#àôƒ’¸p›kÅÅN¢ºú3׎Ÿ¥ Cò³YâÆ’6On~[·í¼>Å͉›>…È=|µÕV«8NÞ ¹ê÷;Ž€#à8Ž€#à8}ŽÀ &n@7%o|ËTs>nص馛Ɔúôn·„Ñë¼ÐçÖ‰›>¯‚!—KÞ\{íµ¾mjȵ/°#à8Ž€#à8Ž€#Ðw zâ&·mj¨[Ý4cq³ûî»GŸ6ýÁÒFÝĉ!៽‰€È|Þœ}öÙ½ùh–#à8Ž€#à8Ž€#à a=qCݺÕMÏ^•¸‘µÍ‚—Í ³¸»g"}øË‰›>ˆ?ë3|߸ÕÍo^|GÀpGÀpG ÄMÎêæœ½îêE˜û×£ª7[n¹e˜2eJhÖÚ†íh7ÛµB;qÓ5h=áÈêæá‡W^yeƒÐ~ÛpGÀpGÀpöÄ 0¥V7Cy»TUâFÛ¤ž9ð†0jríô¨’žÚUt4{ƒdÞvâ¦!D KpÚÔ&›l|»T—ödGÀpGÀpG`1† q“ZÝ e'ÅU‰›×¾öµaذaaæ{olxä·Hp^ýMÃÓÓf…»þ<½ðhöÅZbœ¸i,ÚqvÚi§°páÂpÊ)§t|xøÐ‡>ð-uÌ1Ç„n¸¡gÄü:ì°ÃÂRK-Î?ÿüpãûu•Gî³Ï>1Ï=öX8î¸ãªD´a¶Øb‹X·ð—¿üexá…E§ðÕ­Â?þøãáÿøG×qi¥íyýV¯–5ÖX#¼úÕ¯Žþþ÷¿‡'Ÿ|2~/ª›ò˜1cÂöÛo6Þxã°ÜrËÅ“ë~ýë_Ç +¬°BøÀ>,X~úÓŸÖÓµñ»ñ}Ô¨Qá=ïyOLúâ‹/޾½ºñœæ¾ûîvÝe—pÖþN=õÔ¦²íïÓ^«¢Aû !CÜPƒ s§¿æ>³h2}ËŸÏÜÜÝPl9Ý nØz†lþ­)õ"‹ÌÉ7:Ü—/¬tªJo7LF.¸à‚ð‡?ü¡jö:nÍ5× K.¹dxöÙgã ©(Ñ7½éMa·Ýv sçÎ ‡~x=Ÿ·½ímaüøñÑjê‘G ŸûÜçâýïÿûaâĉEû3ŸùL=N»_P2FŒ “N:©Ý亿*¶Eh‡¸ÁŠíío{¤ÿö·¿…ã?>ì¿ÿþqÂÍçž{.Ö_€mþØsÏ=ëJØ'?ùÉp÷Ýq8þÛßþ6°…ŒI=¨Þ&PÿøÇƒÆµÿøÇ+[³eùÔ§>U'nèÔ#RT·(é_úÒ—b˜ï|ç;áÒK/ß»ñ§Õ¶××õÛ ,º•æÁ=ôИü7¾ñº¬¢úW>Æ ú‘åëo|cüùµ¯}-¬»îºñû5×\¾úÕ¯*XW?'MšŽ>úèøŒ /¼0üèG?êêóúkâ(Ô¿øÅ/bö°¶|Ç;ÞßÍUó;Pqü¾PŒ­@䉴×;ù½híä3ÚI ÿôMÆxòÎé„´ÛÏ¿ùÍo†µÖZ+ZtÐAÈR×ÓhwlüË_þ'É(söÿÔˆÕÞVû´¿O{£v÷3†qÃàÊË–mò‘°Ã;ÔƒOŸ>=¼ë]ïªÿnçK»ý\ïætý…¸ääPpúá¸<펖¸an{Î9ç,öŒn\hulô÷i7jch¥9dˆ›1“F„±“G…ÑË c–kyÎSóÃìÇæ×¬p„ÙÏ25ß âðäðYd ×rŠ9ÑË^‘“³Ì!T+qÃÖ,0™—4"n~[#—†×È%VCÿûßÇhXt`2‹œqÆá_ÿúW‰Ö‡?üáhN{ÕUWÅŠ°Í?oyË[f­F¶™\Ç£·‚mQ&¤4»UŠx`L›6-|ùË_Žßõ“á~ðƒmlüã;¶•‰ô™lªMÐ>°äê”ôÕD’r™É´•þHÜ”ÕíÊ+¯\·d ¯vú¯¶×Wõkët |/"nÊꟲýîw¿ &LˆÅ|÷»ßžzê©EÖŠùóçÇíéý;ø£U夃Yh:©Ï~ö³a«­¶Šñ>ö±…ûé4rP0éG¬èŸ~úé¹ …×:Ž,b5[æB0*ÜèÄÍŠ+®Ž:ê¨h]j‹ÐIâ¦Ý~Þ‰mŒ]‡rˆ….~owlÜtÓMë[¥ì¢äbêð…Vû´¿O;\C0¹AOÜŒ;,,±òè°ü¶K„^9>Œ_mtjžQ³ºyì¢áñKg„™Ì /Ìnßú Çúán7Õ³nÉÁ Ëœô^n«•⤟ƒ‘¸ñ¡²âkŸ2âæå/y´6 
Î÷¾÷½ºO”¯|å+aÕUWÛ§ÞûÞ÷*É®~~ýë_GÇã«ãÓŸþtWŸÕlâ­`[öŒV‰›÷½ï}“Úo}ë[áòË//{LÇîAlàG‡ö„eùè¤ôöDd饗ŽCL¤%LžÙþ‡ôGâFù,ú„t¥\v{LQØV®·Óöz»~[)_‰SDÜ4Ê[%é§X4ôÖ˜Ý(OÜoU9©’v·Ât‹¸i'¿Gü…0Wd1…[þºÚÁ¡jÜþFܰÍômµ­¯Z$xæ™gâxÍïN7Uñ) 7‰›¢²èú`ý}ªšõÏVÔÄ͈1ÃÃRkŒ«4!,¿ÝøRŒ¿dF¸ïøgÂswÎ /ÌYPv ßìÄMj]“Zá”aÜ âfÊ”)Ñ åé§Ÿ.|T»[¥XIà9l‡)ÛŽ„ª­+lŸa¿4%VâˈüÛ@ö;ßùÎú3´}ª7IauÑE^LEÂV-&±`ÒìD2 K‘²:KŸÛ*¶i:öw«Ä Ö6Š‹" ‰Ò‚#d&ˆüé4z.R¶Üá$§eymw"³°2¢~ÙÒwï½÷–foçwŽNœ ÑÁ*èvÛmöÞ{ï¯Ä +¡S§N“ôÛo¿½0­*ø`kñyçw>£•í´½ªõ‹tœc9‚•}¼S¿M6Ù$°½÷X,Ù%–X"¬¿þúÑ÷FUßMŒÏŒ±üGù¢Ý1ÞVòC{ ÜvÛmu ™v‰›;-¿c”‰m Œ{`Åv«ªã,8Щ;aÜ)ÂR¼¨'Òøá‡ëï«*x7¦Y↼Ñvî¿ÿþŽ8ìoGòÃØË¸ÈØKÛì„h cŽA»h”.þB–_~ù@xùmj7Â⃶NÚ9iuµiÑÖpŽóM7ÝT¹oÛ4ô]V#üfë<[ð´ÒÄ e£Œ92nK×ÖµªÄM'Æj¶n2²-¾¨ÁKØYܦLDÜ´;6¶Sævû´-_î}Ê¢Válif~ѨoÚô:ý]}´ÙñPã y/›#¥ù¥ònà]Ã;‹÷CY{Jãó›÷i0$~™¾eãwk¬µÏèÆ÷AMÜ,õòÑaµC– “w\²v^8#Ü{ìôðÜ]s+…¨ú’¸3m©Âêf™ÆEŸ7ø¹IÉœ"|›!nPˆP\ññÿ÷q+Š&E¤Ï¤ÚfdŸ)2"õqÄŠ}Þ()§Á°%IÂöW¾ò•qÒCX%æ'?ùIT8!sx a¥‚ˆ\@qÀƒ—0Ê(/‹2ⲇ¸8 ÄÙ,¾eüôLÒÖvíµ×Öè< Š0ÂOˆÊƒâ›žh„â‡I*1dfL9Xåĉ믿>†c×°š"Áé* įýëëNäÀê2™œZÁÌ]V!àÌi*ä—rY_>6NÑ÷V±-Jë"_ªn•Ò¸(Möµãp”É~hÚ$õ€à ¡ q"”ÊÙ>LŽ˜d«Þ †r0™Û&¥É}2ð£ýhØf›mz´#úu§’›ˆ( Š-ycÒ„0Ùµ§'á̘ÕL&€ò~Úi§…“O>9üêW¿Š—Ï=÷Üú‰"nn½õÖ€uä ûè»AÜà€’üÙí‹äÅgÜ©3á"…£¨nUf”mÕ7õK=çdë­·²UɉvÛy(«_îCºá—‹2X¡ÏSlûkFl{Äwc" ¤Æ5Ú8í_(f`Î} íø†¦…#K&²}Â7{x3®¤ÛIiÇ8ûg›ÛÙRiGæ)¤›bE>˜‹´ê· _óš×ôÃÈ;>k‹Ó²¤(§åã7í©Šï”É“'×}éÑŽèàb*!-æ:iÛ,G‰KûÆÂXã Ä©íGÌoùÍüÉ ý‰9c ޾«–…4 ¨{ÆÕØ¡pw’¸iÔÏQŽ)[ÚÎ)dí§Ì9q+cµÙžÌoÒ±ã ϧÏX¿™¼ë!–rB}kœlwldË9'iÕg+eVžÛíÓJÇ~Ú÷)[ãq°ÁØ qq’±‘y»„y0îcŽ9¦ðTSÈ«í3öV]èlv)˜ÔHT:ú´y`E²áô(œÝÙ=äUˆåï’K.‰Š =iå„ã¦Q€&=Ë.»lœÄBf!´ &÷¼Œ™(óBáèY„ÁQÛŸ4 Æ/þa0å\\kù#2Œ²êEbãñL˜¤Ù— ¤é (ÈòýÀo&Íœ„Õ*¶<«Hš%nPðÁ»H¤àY²(hˆUð˜ð0y¢-KdÒÎïœs>9ﳓ%Åå“ í™úc•%*,÷™$¥¤žˆØS¥(3mR¤ ` /`ÚN‘p\¹&4Vi$¯\?å”SêQ»AÜPN¼(NOƒX“)Eu«x|Ja¢/¨ýÛûôOœ1#Œ9øÃi$í¶=Ò/ª_îQ.È!Õ1×RI1Jï§¿m{„”† É ÏEQAyÊIÎá Ȥôåâ1ñc2-«æˆ#¾X{Ÿ,"°íu}‡ÐFqBÔ¯ùž«k G˜Tì¶)ëÀ“üKyTa¦ßé'Dä~*VQLïñrTmÎöÁ\Øôd1ï«à¥aØ.ª÷’îñ®8Fë RÁ>äÂ8B1Ö@b Š uŠUGªC. 
ï%Ú+¸è¹þÙ.Žn¸a[‹°bìE1¦M7#lïÑ!EñèGö$C¹ðä£ qƒRɰ@ĶåÚfÑ8J´õ©µÅ"Ë!Âj®Âǯk¤î¸Ú"HNh'ßÌwª–…tx0ÞZ2¯ÄMY?‡`„°ÆJ '”ñr'W¶VÇj-¤ v¤Ÿú ýUGÅ Ÿ¢°z÷·;6Z‡Îx`ǵZfi·O÷Ȉù¡÷)s8Nƒ%9¡>Ñ_D6ÛÅjó´Ôî¥Âó=úInœJÃó»Õñ¸­Ì‘ˆ9Í\Õ.ˆqÝ „( –Ôea‘EÄÎm<}/šwtk¬Õs{ãsp7µfÉ©cšïZ6,·e~/÷É+g†;~óTxþžšbTküƒQúq#‡Äà‹3c{Œx#Ì[!n”&+(|¬ôà¬u¯½öª ¨¬JDŒX‹È&!+Il=‘°¨R^b1Lv!-öØcú© „·ÄâÛÏFÄ Ê59„ LZx¹“G”V†—)/ $GÜpIÃ~ûíÇ׈xð`B áE|,o,i@Xå•ç±Z*qÃo^ú¬c9AÞØ"¥œ ñ4ù²Ä qQ`ˆ iijídšûÍŠò[fÍT–f³Ä ý+L¬ !¬S.„‰-/§Ü&¨ý±+̬ÂPdŸ}ö‰“%¾CΉˆã7Bý1GÀúKE3]'/(†X×@’`á"2‡{´qkÊ«‰„ŸˆüÍÐnd)tâ‰'Æ …žÁŠºm+Xiñ<”4ðEq³ y#¥±ÓÄ=Q…¶{ÅW„óÎ;/’ˆ»ì²K4k[D !ß‹޲º%"’%7éæ¾c€bÙH:ÑörõËs™ða!b…öŘù÷ŠW¼"¶Õ!cTÎZ+—ÿ´=B†üóŸÿŒ$ïÞð†0õE¥Mqyî 'œIÝÝvÛ-P?úœøYâ…öJ»¢­c…I»¡Ã8óþ÷¿¿¡l‰Fêè²Ë.‹m‚¶Î3EpèÙˆÆj,†”Ú[ˆ ÆE­`—)t8àeÌD°Œ£x`QD;ÖbÅÿþ÷¿8öÇ€µ?Xè¹\Ãʈ:‚,§ªs¿Q$Œ„qBFõÏ$û¿ÿýoTüxïÒ>´Jšº­7(”Å"Ï µ¥ø@BQo”… „²B„Qvð¢}‘wäºë®íaµUDÜ´‹#ýCxPw,‘WÆ^¬¨9¬V}ôÑø»Ñ«x’óê’•:&÷ …9­ ¡MÒ6©M˜ËÈ¢–¾Te§%n”OÊÅ܇¾Æ;@e>ûì³ëÇ­>7ŽÒYÌPý0.`}BþîSß"äx—ОYØb®A½kîKxâU!¡›Ç´¸)ëç²|!/Ìç(cŠ(–‡¹Ÿ–­±:íšgãCö‚Ø1†zB1ÿB¼…H£ý²è‰0öòCrïÆfÆÆ"â¦2·Û§cÁ þè}ªÛö=”Ö%m˜]šs+.õËXeßm¤gÛOq¡çê³Õñ°Õ9m‚yƒdYŒen)Ř'r’mO¼ß$–¸á80&>õ½Ë.;÷XXád}Ivk¬Uþzësp75ô–YlØøˆÉË›f„ãÁ¯?òÑðôM³›‰6 Âêå¥IaQæµR3óý7Yìz#çÄŠ`-nRGÅ SôÙ*qÃj ƒ€ÒŸZSx™ðÒçl•¡”¸A)f‚°MeÔŠ &ŠV˜éH×v‰”H'ž…é¯- u\ä㦈¸!¯Ú~EºXW0ˆ2°#˜`ZÓÍx±ö…“ Š «wKÜS3íL7VM5ÁFÉ×ÄÑ7´Q‘ŸN‰ê©·ˆ廑Ÿ‘ÜFqQ€ÀSJ„a^äÜãÎ6(&VPf´ÕÁNªl;1g&Ãv+a­•BºJ®Éφ¸¤ú)¤Í‹«žýë_£bmŸyôÑG×'š©âF8keÄïFJc§‰¶÷i2š®H“ûdÎÆoÔÑwÖH‰%nðÕ!’Õ]»MCáQFG}¶È"B÷ø„Ü`@FÚ%nPò°‚``²Œ4i·CÜÀt3PB lA0±be%&þò“Ãj—”$‰¸)"À Ì,j´åÄ7l a•.Yr¤×õ;%0tÏHÜo&o`M}°²NÒ*º²@x„É1‚¿& ©Ø‰Vb´ûTÀÿ;<eW–e„ÓDÌ™„`1 ‚)7¹ Ž”àôÅÊ= [;ðA4R-‘b_ôJ«™O&¿ÂòUÊ@š–6ôH m—Ì)Äk49% ¤1–Hê#^¬ý@ÉÒj¥®7úìqc'Ì(Z´yѤ‰™UÎm˜ô»m(g(iV¬”Ü–1mÈl„I¿¬"Ùö&‹0” ”äTX ¤3þÙvÎJŽšÍ)¤Cß ®Æ¦Þ n,¡ƒ²‘+m¹yBà"šx‹p“?GydÝ2²Q´Qµ=sVÍÙó›k+ò9±>xŸÉ7Z·‰Ûnéçv‹r.ŸºVDÜ´‹£ÒÅú‡q,'ß´ÍÁ’ oÇ0æ<²˜IÃBêm­éqƒÅý.µ /Ì]d-Æ{Y Cé8Ê¢™æ_ÌE¨ÃTØ6É–m$GTqe¼é«)¹ÁýfDï°ªuR%mÛŸí{ R _HnÌã:sTù„KËfÛ|³cµmžx¦„<±-:&¯VúЏi§Ìíöi[þô»æK\§¾ðÇ’Š]p³–Ô`ÌœŠö›n—b|Ð6©ª¹<×âTu<´ãK³s$æ°X4!¼§é?©àÞBĬõoh‰›" rÒ*²^îÆX›æ½7~Zâf™Æ†Í¿^sh:r›_Ì…ó†«?ÿpxzš[ÜtÒâ†cÀHKZ!ØZµ¸áÅÎ$"Ê©}“L!1&ðV & 
(RAQ•‰»%!ÒpLþ™Ø·KÜÈÊ…-R(ô¬vˆÒÀŒùmo{›’‹V(’¬¤nàÇD|m7LÆ´ÒÆÏ‘]–¸±õ¡¸Ö玮¥ŸXŸÉ@%n(G³íÎ m’:Ï [G S^Z˜Ï¦b'f9EYá5á i:÷tIÊ»ü,MJìjlºAÏâsçšµíi¤4–7Xï4:b™U5ùß±&ÀLàŽ;‡*R…CÎõª7øµÁ‰(Âd ¥SÒ â† $åB„]š_)Yu <æˆó4ŽÚcª|(œµ@`|H·n@ÂKQµ[ô¬%c«Ã9Ѥ± ² ±ÛNr«|J‡~Èé Ho7(´ÚJƒr/kå‡OúmÑ–ëë:¬Tr 6ŠÒ¨*>Ê1 ’³´P8ëãë1ÚÒmâÆ*Ç<‚Ž-ÒX„ò^.Mú-ÙÜ.Ž([ÔBÝé{šV‘™‹õ‰4ü«^õª¸’ÍuÒd±)'–…(ÇŠSÒ)â«YHºœàÛ?>dr8J¨¶0bÕ+ëŽ4=\·–ei8 U±Lãëwo7,¾h›5ãÖ`9¡®iSiÙÚ«-®E¤,T옩ü ïtÞ ûÞ­ZÜ´Zævû´ÊUô©ù÷‹, ¹'òˆv>+'ìÔ±Ý.eÇ5tô*bã¾ÊxØÎI$sE½[Ò|BL1ïâÓZóZâ¿\ŒQ9±®4®uk¬Í=¿Û×-q³Ôš£ÃŸ\!Œ_mtSθwn¸ñ»…çîX䤩È$poYÜ@Ê –¨I!jÆ)±â¶BÜðÒ`²žØ]M tRLޏáÅÄ ¾8Yíà‡°’ËË&'¼àPlÛ!n¨?)꬘Ù=œ<“{­n•Ržy9ÈTå§h[ &U¬ C¥!⦠0;^NX0 –¸A¶f¤ÜçèV®—‰}Ñ¥á2qCYh—2‰æåÇË?·šIXLÁy¹#9sV®Û‰Yn‹a¢•;YrÑ_ˆLö§b• kœ†³«N”Æ2â†S¡ÊÚÏÅǧo!–|A1fÅ«ªØ¸ŸÍ7ö(iÈaVš:%&nìĪj-‰RGí1U>Ç7š”éŸEÄúMN¹°ñí–5?ÆkýE;):uŠwŒVý{ƒ¸Édó_ö]Ž`±ìÒ¶`V|µRŸÆµ[4õAŵþ«ÊÒæ=(EÝ’GÝ&nÈ'5ïýTG!Â!çXѵ’#nÚÅÑn'´Ï*ûN_Nó–†·„LY{µccJHvЏaq ÿ,9ÁbQó»roÇQÏ:í·×ù.…°ˆ Pxm—(_®Ñgo7UÇ.(¶líŽÕÆcp™[û‚¸i§ÌíöéFíFó¥Fí´ÈÎΟ §47×6)ꀅ5Ú@Uiv<´ý³™9’­|ia.—Ï?c½ZÛno KÜä¶++keªËnµzfo~ZâfÜËF…U˜V~ÝÒMáùÀ¿Ÿ ÷øL˜õȼ¦â ¤À½EÜ4¤Ò†4[!nˆÇжöÑó[Â*·ÌPÙƒHÄ7L*´-’I/¤…gRŸ[bÐbÐæ³â†A‰=œ Ì(1¼¬´KÜà´É¡„“6G0áÇÂÕW]~ò¢SMÅÓ„,wOa˜„³ª¦-=ˆVq|W$Ôqzĸ ;Љ«¬Q÷˜›Š °åä»]aŸ4DI*š˜q½Œ¸±&µÖÄU‘4]Ú' Dº­‡ºã™H™µ@5Rˈ¶iëLšGýÆâFVvËR™’âÚO;¡i–¸!¼¬­ÒoÓoõ{§‰öá[K¤¢­‰Œ lå>ŠN•“°Ô­òaËÝ×ÄMÙD•vÆV-ÄÖa»«ÊvUÔnW°}¯¨È VžÜg›ù²}«Œ\a{Š”îF}ç U‰›¢•íÞ nÈ'ãï}ʘZÖÒöØæ'¿k„o‡¸)ÂÑÖÏ(ªCêù°pÔ qSÖ^-q“ZvЏI}¢QNI³Ä ñ rÛEÆúËá·õ™¢ñņ-ûÞ‰”Y,ìlÙÚ«Ç`$ìû qÓN™mŸìôØVj­7èµÎ˜¥íRÌ¡µMªÌŠ™çI3ãa«s¤N7U¼“rbß—"nl½§Scmîùݾ6h‰›á£†… 5Å|rù0fÒÈJ8Îyb~ÍÚæñðLÍ1ñ‚yÕÙÊJ‰÷£@}IÜÈ §™S¤RèZ%n˜°bº J d/;»Ç\D D„J¨œ‡’éY¬Á™2 &ÕV°~Àäi‡¸ÑÉVÖJÅ>‡|µjqà €U.Ògb…0á—²­gQVT8(¿VDÜàÐ.gŠÏ‹Œ|3hõµqcŸÑÊ÷LÜX‚Be‡¤¢½å„—1í±ÛlXM̸VFTh‘NÎtø´½Ýwß=žHÃo^Ž(³é™?çœÁ‹ÓOFJ£ÅÅ*¶1r“ìö2ë#¥J2í7¶¼|/Û¶Q%/6L§‰ÒÖ w3އmžŠ¾«=ZåÆm•¸±'Jµ³Uªlë‰|ú_úÄ$Ò-â†-aX!ø)šŒÆæ%Mʶ3õõV©¢z²Ö)ÙÜÈ9±¡þ«QÞÍŒ7rälWz ˜#nÚÅÑÆ¿îºëj§XYÏS;_,a_Ö^­–Z¯uЏ)Û*Å»^>úìûÉŽ£XÒFqbŠ0.@F±˜f…-ÁŒÝH‘Ï7æ(½(Eã‹M³ì{o7,jµ³Uª±ºÑx Fý¸!O­–ÙöÉNäËΗªl•bQîmÆuiè]FÆ¢šÅÒBÊ,ìb€ ‡íÌ‘T/éøj³e žV¶JYò_ãš­×N޵6ß½õ}Ð74豓F…^1>¬õîeúºÁ·Íí¿~*è?L€Y¹Ò±¡ì›ÅìYb}¡L¥9"n϶™«jV9VPN!»åˉ›·ëqº‰,U¸F;d2 
³NÀ¡­æœlҾ哉SEpÚ˜Š&f\/r`ÌŠ¯œÓ4‰&Ž&"z“?êb±/Þx¡ö‡üª¥&ú„±§Îð»7‰» l·p+œÚ@Ø~"K«p0Ù’%T#Åt­¹rÙŠ±ÍCÕïÝ ng4VؽöUóTNí±H±j•¸±æÓE«ö×9çĶ9'¦nˆÛ.ŽÂ*M׿«ÙïŒK`„ôgçÄl¯ÆÏ býzåÆQKló®a£ÆWâ[K/ÚÝ»jÛÓ{¤vÑøBZU¤7‰Þ² ·Ûcl>Ëœ·3V7ÉCâ&]ðQÞ½µÈStâžµ¶sìvÊÜnŸVÙrŸš/q¯È Ý&n·*=æG”¡=ì^#,W^e•xhó’NIÑxØÎÉ.nXËm›ç*Ήѡr‚Ž4iÒ¤ÅÈÙnŒµ¹çwûÚ %nܵ-S7VÙgéB7øµ¹ÿ_φéWÏ 3ñ)aÒ-âF§qAÔȯ ßÛ±®Qžíg«Ä i0øóÂÅJa›m¶‰Î±t¦ÿ(®&>¬Ðˆ¸á:~_Pž1ÿG0×ÓIüÆ2G§𛓪 …& ¼¼ˆÛ*qc}éðÈMÔyaQÇ9¢Š|Ž)¡¶”°Ê…Ãê#Š?Ÿ3 ”ZÕ…Ùç(H„Ü^UKÜPn^À8€C¶§AÐ YÈNÄ7‹7¼@Q6yaÒŽÀ‹“¥Ø6Å=ðe"›zè·&ÈX¥¢‰×ISüÔá+V,r™noÒD„v¡S{ȧ¶Ñ‘.¾nä¼›ßöä~sd0ÎôhK»îºkTÜÔÇ¸ß›Ä ÏÓÄïŒ))fåä4ü¢ 9…ƒë&§„)ÚÓÎ=Ÿ (©X°1NQWU¥ÄÅ GΑ7ÆCY„°òUÅŠHí‘òåŽëm•¸IOa%¿Œí"CÓc`¥¨.wjõƒC˜Þ nè“ôMe–2¥–žl‡Q¿ä'Gµò‡A\Nšâ½`Å:çz£>h㪹Æ; ÅΊ%f©gÚµ¶1[“öœœ³À I‰ëß%çW‰q” +G;­ÈW×,ùZDÜ´‹£[Y "Vx÷2vN˜0!¾+­U° —~·cXŽÔƒlUÿÊŒÔ)‹ò•S>±^Æ’„÷bÉê¢qÔbmO}#>s5Þ‡à„Ð'Y°@Ä Én±._bä 4ä°«=Änñ°–£œÂƒ•B{åHnÊ/CʮŴlíŒÕêËišz6ŸeÄ%Y™?¦ï€FïÆV‰›vÊlÛY§ÇFÍ—À ë|Æl‘Ã\cÎF]ÊÇd‘@¶¾£»à+fÊ”ÚA<µ6 çó¤SUZíøÒ̉w‹(H»Çç¬ m½§'õuk¬­Šu§Â zâ ÆLƾld˜¸ÉذôÚcÂØå)ݳŸž½mN˜~Ýì0û‘ùaÎôž¾B:rK§ÄŽø¶G{‹ÈÙíô—w‚Vˆ^vüg–VõRk„qC\œã2I—Â̪žè2èò›É? ¤D/fÈ—V‰&áZUÀêÉz*­7(…¼|Q–! 
bxQ#¬dɱ3«—0æˆüÓä°ã¾ˆ&6šPpÝ ÄÖj[yäWˆ{NÜ,NÜXÅ‚C'‰ÙÉ7m*·¡,«8wKE3]§­2Ù…dC1Ä,V¤Mn¢¡‰ˆ%nHË*güNîB4¥'dNbOi¤4vr«χ<ÒI(ôHpÇ"ƒö¹ÕV[ÕêF>'ŠŽF“Sž)E hëuαÔH‘eT¼™ùcÛ vÒOð²üÕ/X°²E\„ $„/D6~®m e)*S¼™üQ{,RZ%nxŒµj¢½²EÒ¢BŠ]ޱ«Þä =ñ™`wƒ 6èQ’Þ nx ʯ¶Q`ipn$¥ÿ2¦ã(¢!ÏŒãŒÇHÚ?±Ì€`ÅŠòl·Ývõ6NøF}0pd»ŠÞ¤M|ØŽ„•…ˆÙ”¬°&í¤G¿ƒ8%<'R&)ûÜO‰Ôãgœ¦M›-w ‡~T³:T»ÄGÄßj'ÇÑ¿y/CÔ° ƒ¤Ä@qÓ.Ž´=°R™xŸBv1þ‘6–Ô%Bßb«o±þk¨{ˆHÚ+§~ã[l“á^êχ$nHÅ2ÆÞS<ß¶ò…²,)GiÌË{ÔβË.ßÚî¦ôôIYQ’s«ñ SõSãuޏÈבÇXùT=!°ˆ¸!OrªÌwÆ.ú*cóQ,Ô°ÒPN‘r\kg¬n4“~qÃvzÚ8Â<ëT†äô¿ìÝCœV‰›vÊÜnŸ&ßE¢÷©î3fŸWkÿ¹ºd.Θ›ççæOÖBPé7úlu‹à*éÖX«ô{ësH7Ì©?jÉáaÉÕ øøù»ç„yÏ/¨µ]üŸÝ"n8òÛZ×ðûêÏÀ*½U"¨_LjY1M…É“ˆ"(oZÉY^ðòæÄ6¬4°¼b%Ž PYA ¢œ Žù8i a» äG'‹&ÆA;Ác‘ŽéVE»MãÿÙ;x;ŠêOzBˆTCï)‚DD ‚¢X°‹ˆ"¨Ø°bAT" –?J/*¢ ¡éH€ $éùßï„s=oÞî½»·½ûÞûÏç½½wwvvæ7³sçüæœ3|ptm ymkeô–S¤)9F:ë?<Ÿ6+SG»—|Êô=Ò×k_¬±°@L‰Ê9mÚ´hiŪZQ±ZîÏR¬ØWK„‰-c‰Otâ„Å™ cŽù\Å…kë}œ4L~ég)qm÷óN@rCJø÷ƒþÊxÎ{Àdñïj½ö7å$/¦’·žÈêßÔ‰÷Œß _.ÊAøÝH r®!X¨@°š çÀžUp5²E‹Ô*‚tõ·'È#Y,=J ïN­wš¶ÅúÅ[O ð±{ ml–D)qÃ3 ÿm®Ã9#nøŒÅÖWôYåb ‚ öî ÜcÛÓæToG,‘;ì°ê†<¡LŒô/?®/¿Zÿ?®¼/Gîb¼ÆÚ(k‡&®›ë&Ï´>Íù"ÂXnmËo–~f9dyàBÅx—Ö+oµû|ðeú*ã[:ÆÐ¶,œQo~SL®7¾Ø3ê rÌ;“Õì}Î"ujå[ï=gîÄ{ŽÛ§æRŒ Œü–æŒÕEð22œ¾Êû˜ ¿oÌ­ý™#ó¾ ÍŽôÛ4$k¡¥‘:[ù›}§-ô¿§®ô[[#Ç-.>¸Gú]íìš?¹Ê¹¬E;Ÿ¶ÖçFÆC˯ìÉî£U™7˜õ£§ïÒ×@Ê랸±vgÝ“ðôA,x°ΓvµyÏkõùACÜp)‰põžØ¥As´ÉL=â†&5ÏQ!n†Õ†Ç¬kp•BVÞjL´¶ñ®Sµs(~µQ‹#nŠ?©\J0sýÀ4Ó_!få’ ‚ir™ ‰ÜCÜVŒü[³È¡x±ÿpc‚€ë-‚ü£³ˆ½¿~†¸¡]ý*_7×…5ÛI#kuÓ7¦(ãò¹IŸÃò7˜V -VD±‚¤²÷Ä?ÃObS7+Ÿ®Ÿ3ywQÐQê½ÅS+žo0¿™•/¤3iü*TVº¾8ÇHFú ŠRV»öEÙ²žÉ{!!ã$~:Q̺s(ƒ(…Ô•û GºA¨ÓäÉ“c`p”FÜSŠô~çÁk;`êÔʶƒ¸¡o€cI™¾²J¸/Ë ªî¼/Ä`q† ²„÷š?ÆÆTùÏJ_ë\+pd\+„2¥R­ç׺fce¤}Ó…ƒZ÷–½žž¸±ùÔ…ÆQ”3Ž­ÞEÛì2ì²Ä8úDY«Å[B”µR+ZêÊøÃ; Ž,ˆ”‘¾«™O®€ò¶²í‹Ô½Ñ:·â®W>ÆE\ë8Òw‹¾›g¾¸¨G?@ÿ(2Ö×+K£ã¡/eçHŒÑŒïX1Ö16åÕ#%nl ˆ?L,{‹ÎÏÚ5ÖÖøÙë"nDÜäö!üfy©žÙÿŽ0bÒÜtvÁ,lì{;“w·7 º¬RûË`ŠB€O¹_ÍdÒŠ©vYbpF KÈ»¯Kú+ê¬ôdÉ@$nP@tˆ àú‹Xlúdj‘“EÜ´»^ÞåäÖ[o©¬p÷ŒÛÀä‹#Þ©VNJÚ]¯Fògò`ÖwY\ÉS÷! ú <â¦å0Á: E›ß ܯRk<¬®XmGÒàã­*›®ÍïÖÑ!Ðj¼[]žËi«ŸÙ×ùå7}]®N?_Ĉ›Ü>‡)2+p³¶½'ŒÙaQnºôÖ7­Œi“æß­Ä åd dÜX%Ìbw@î4"fRÉ.L˜÷•`îj‰ê~å•Wfe 7˜Ý²:€9=»+õa…ßjHÅtç¾ n0c·>†˜Êcˆ•IìÚ@8b½FP>V™p¯Ê[muU„€øôqª~«aÈ\N¦WP³Àï6‹hר™¬¬Õs¼¹Î¿C+1èp¯Ísªs». 
\˜Çí·ß~Ñ:+~#)Ù –˜0]DÜ,oa7"nrßuâ‘`zúìŠO†á‡t‡)8…-CÜ  bBÙÑÉ•/LÁÓAžÊy£ÂÄBŸjÌcûJPþmG â&¤þéV.ˆ?\nØ9P~T,¾ »{0!ìObÛ02i%À+ýé â†ç´ÌBs©øX?éµð±É܉!”ob ÔUuB`p ÐWÄ ®8ÇUv©dWÏñ}yâ‰'Â\ÐÕe­U¸m·Ý6Ð?y÷_xá…ðï|'ÌŸ?¿æ˜Ðéºo³Í6á ƒ <ð@øáX«:]qMï{ï|ç;Ä ÂC=.¾øâj»¬¾úêá#ùHXºti8õÔSìY³ª×ôA! „€AGÜl÷5âg–Æ61~h¸ñ“ äöͬÛK^ò’xþñÇϼžurï½÷‡vXKçw^øíoÞúÖ·Fň‹sçÎ ‡zhtúÒGuT•¸[0–ôo~þ󟇕W^9’ ýQŽ=öØ)âåø@T¦k ®û¹çžF‹ ñyå•Wú"wÝçnß·Ûn»pà†!C†„|0|ÿûßo9†¿úկ¨Q£ÂSO=Ž8âˆjþ_ùÊW¦›n¿ß|óÍáË_þrõš>! „€B` #0¨ˆ›!Æ„•·X>·Fsçü°lÉ2û:(Že‰›qãÆEKgÑ¢Eá±Ç çœsN˜:ujxË[Þ9äxYġԺc·+r­«éàÉ©ÓäEŠìñdž n»í¶ð»ßý.½\÷ûÞð†IcòÜsÏE%2Ë›ZcB§ëýèGáꫯ¶bwå±[ßwˆšüãá5¯yM·Ù³g‡Ã?¼ú½UòˆˆšÍ6Û,>†ßˆœþ ¯}íkÃ.»ì‹ú­o}Kä{h4•Q! „@—!0(ˆ›¡#†„Ñ“†‡Q†‡qŒŒGÚaÁ¬Åaîý ãqþÌÅaé¢ÁAà”%n˜p~ìc‹]÷öÛo'œpBülÿp•:òÈ#ÃZk­Î:ë¬pçwÚ%[€@·*r-¨Ú Í¢ÓäE ´‘5ÿýïç>õ©ôrÝïŸûÜçÂ+^ñŠ˜îg?ûY¸ôÒK{ÜSkLètݧL™Ru•‚¸évéÆ÷}Í5× 'tR´óøuš¸1W©Å‹G··§Ÿ~Ú§k?û÷哟üdxøá‡»¶¬*˜B@!Ð xâfĸ¡aìº#ï&í<6 3´GK,yai˜ù·yaÆ•sü‡†Es—»QõH4À¾”%n>ô¡…=öØ#¢ðõ¯=üûßÿ`ˆtwuºQ‘ënĺ¿t&/RDš%nˆÃ8²lÙ²ðŽw¼£TÜ¥¾®{ŠE·}ï¶÷7Y\4±¸Ažyæ™°ÒJ+Åï&nº­­Š–GÄMQ¤”N! „€ÈC`@7ÃWVÚhTØà=«†•6•‡A<ÿìÝ Âý¿|:<{ï‚°ø¹MÞ”%n°¶13oHªö7Á¼~Ĉá®»î ¸z”“É“'‡VX!®Î˜1#*ªEî-’†UlòôÑGcþ(ÁYÒ EnÕUW 믿~tc! t+ƒÓæ›o¦OŸë’U‡¬s¬žS”¿{ï½7+Iæ¹V´KQìÓðì 7Ü0*­»¥ìHy1lذø×Dðh4ï"åi–¸!VÌj«­ß%É–‘¬ºgÝß*¬³ò®uËAþxOž}öÙZIK]³÷âã¾ûîˇZñ¾3fm°Á1NÌ´iÓÂóÏ?_ª¬>ñ/~ñ‹0~üøxêŸÿügÀÕ‡Àôô×f‰Æ%Þ'\lï¾ûît˜å¹Jùrù¼îºëÆÀÙ`õ>Mš4)tŸçó>ãæWD°([{íµãßý÷ßfΜYó¶²Ä ñ}'NœˆE‡…NÑß-+H½º[:…€B@þÀ€&nVÚxTØð°UÃ*SÆjÙ·¼î;£BÞܳ Púþš¨(qcÊY^=‰7@€H”„³Ï>;&ûÇ?þ¾ýíoÇÏÄÀ3f9ö?øÁÂ_ÿú×Y¡<Ø’•\LßQ¼@)Øu×]à 7ÜPJyúÚ×¾6Úh£HL¼ç=ï |G ³Õb ñô¥/})Nˆ{èÅ/;î¸cxßûÞ×Ë-€2þýïÔ¥Œ`‘ðö·¿=ÞrüñÇÇ p@>|x5ˆ”Ë/¿< Ô¦RK‘£^§œrJxéK_o»çž{Â1ÇSÍ‚]Àî‰Òè…¿¾ùÍo†ÓN;-nH¹2B¼‹W½êU‘³ûh?ÜoÀ7Oé%°)«ø('&܇B{úé§”Ã,i¤]šÅÞÊŽïÿû{õ ꈫý¾Œxò7CÜ•¶Øb‹YÌ™3'öµ[n¹¥Çy¾œ¶Ïr]´ÄFÎX,ùo|ãv¹×ô³Ÿýl¯óþ„=ן³Ï´áÛÞö¶ø5oLࢯ{V`æVckçV[mÇ+åHûÆ^{í¼ëßKyƨ2V†æ’uÅW„o¼1îº!gV ¸¥ y3ï;c;ï?¤‚ÞoÚžx4÷õmåÓe}†¸;vlÄÁúùùçŸßqÃÎYìe„=—þÌï¿=YÁ‰ßô¦7…÷¾÷½19ã—)æÔ¸2õÈ%—\Òcl}÷»ßÞüæ7W¯ÇD•èàôÈ#Ø©G~—xG(¿ÿMaü†¼á7Ñ“8ì²’'ìÆH¼8~«>ýéOWÇs;O}ÛO>ùä^ñqŠÔàÜàœYÙùñOú“eÝãø½ï}¯Úwø d ’! 
„€ÝÀ€%nF­:<¬ñú£µM¨±º™qÅsaÁÓ‹ËÜÖ¯Ò%nP¢YÍ&‘(%>x1i&ÔJ‘(') ü3w Ò¦AC- (VÛ¢;`±Ì *÷=ùä“«Ž,„¡ÑC¹#‘2ô'ÈÀ,±4·Þzk8ñÄÃ&›l¾úÕ¯f%çè?(µ¤(q“7&·ÕR$%nÚµh»ÿþûW«çû»#1nä äOÑöEù¶– ðtËë;<÷3ŸùLÇ6ú¾C’ŸþÓŸ†1"=KèKŒ‰ŒÿôYObe¥·s´ÑÿøÇDl3Ä DäL&E.é¸äƒ^“‡Isêˆ5˜OÜøv÷iì3¿ üà{$\·ßK›Áâ‡ßE’ŸŠ<ü6’ˆ]æøô„vzc6»'ûŠÔýª«®ª.¨^ÁK¤ja2>ì¶Ûn‘LãV2(P¸ ö]ïzWµL·ÞzK…0XÞÞ¤õ;QN¬©(«îä¹ñÆWß9¯Ð4Ó.Íb9…nVX˜a9Fù±ÛvÛm©Z´ðbõܯ¶Ç 9ÿŒ¼°ËûÛߢ"J?eu7 „›ØÁÉ“žFÊ”!nèk`ŒØû…%«ì^ù^K°Ú¡Oau@»AÐ}ñ‹_Œ·00& µÆ«{JÜ´ ë"Ä eFaÇá_ÿúW$%;ì°j€ WEÄ+Ò–‚†~ƒ’ŽÛ)ÖcÖ§Pªüã[ÒH¾bu„zè¡‘@¨÷¾s"ÜÈTúÌ5×\®¿þúHó~Ù¸O¾ôᛎ éSi”¸‰äReAÀ,1éoþóŸãøÁø 6ž¸HÇ¥"Ä•«B¬Í !pMc dglA YÀò‡6c|âˆð\#Á9ÇøÊb»ÈûŠ&DÈ‹´#– ídV6¸‡ÑÔ™>`HàÂØYÈï'cYMâ6E€c“¬þ–Uw{ïxã´'ÈËc›EžÛótB@! úIÜåøÍG‡)'¾$ ;´²‹ç- ·ÿxxfZOKŒR™tyb›Àµb©㦖’ rÆgÄT`Á2%†I$ט<¢ ¡Äyñ7˜ÔUˆ=qÃ=XëøÉ·wI•1/“y /ÄÉxBIbò‹û“ÏÛ§õŸSòà¼ó΋®.> Ö?YHºÚ™EܰªK],åå%AY4e%ž¨ü[e•U¢¢`ÄNª Yº¬#VXo þð‡{´ ä„ÊʬY³zXüú׿®’f¸B\yå•=²ÇWÄ“|Í´K³ØC¢ƒÉ"ë TPø_æx¢Æ?S¢H‚ Ž´ 8bñqФäe#ÄåÍÑîotW)s£ôJªÏ¿Ö˜`uO‰›va]„¸¡Œsž¸¢ ˆµÂ;R†èHéÔ}œ Â!¼K}S¤yßqÝùüç?ó‚TÀb$uUôÖeê3Mþ5JÜ@~½ñoŒ¹1þòþPˆ'Êiäy:.yr!Ï↼È¢,ƒx&ä%D¬‘£p=3‚“÷5Ë Î»ù{É¿^Œo‰Hi? D.™¸­}†("-Â*³)™vÝŽL¬,³ªL;!Í´K³Ø[œ&,‹,¶†•ÕŽHµL V#/Èã§÷â¥Âj½Yb¤ViF¼d‘v–¥1W);ÏÑ®uqÓ.¬}Ís•Â2…±#•ã?®j —Æ#IÓÚwOÜ`‘Äûg¤Œ¥áˆÕ–„0ÄÒÈûî ‘ï~÷»ÑR0fæþ¡ôƒ±Ï~ŒqÉ }l”¸ñ„3¤ än*Þ3—Š7é=–?ã ‹®L¸ú¦âÇ)o‘‰{'툀!)©@VCÀ‚/–8X̘Ô#nø}ÄÒ!QV 孷޺ꉕ–;ˆïoyu'n±6”/u—ònRŽ „í·^‘&Ž;Ûe‰·ñVhþ]ñ÷å½ï¤1Å¿^2w´´?ùçùÜqá@pk„/X0å‰åŸŽKž`È#n¼5ŠÏß[8ùóyŸ½E¦½¯þ7#ᆲóµˆOÈÔÃåœÊîÄ&óïoyu·2½îu¯‹nX|gÑÁv—27)êÇxDÿ! „€Ý…À€%nF¯6<¬½÷Já¥o_#¢(ìÿ=Nxä¢gÃü'µ«”aÖLŒ˃£¹ñ“ð¹sçòµeÒnâÆosÜqÙ –Yâ•oŽŸ§È‘+èÄ^IÅŸÔRçóOª ùtyŸ F̶æ¸m‘—&þÄÂ!8(‚K®Y¤–"E¤(q“×.ž¸)‹½w%£¬i&+?ñ'PxØ ¢GÄMùàÄíĺ/‰›Z;Е%nèoyﻑѤññrøîň¾ nˆd–võ ¿ÒqÉ‘e‰?ŽÚû걱ϼÏX,a‡u b¿]âÖ-Ä $Ño~ó›8^›»”w“JÊ&: ! „€}À€$n˜\ 6$ŒßltÜYjÅõGBú¹Æ¥ž¹k~XºdY4I.tc?KÔ7>ð¬ÁõÀTÝŽì\³ÇF‰›f]rj•Û“µ\¥Ø©gûí·Yå¹J±¢Š+Á+ú:–¬Ø{ÁÜ@Ÿ\gE%Ð4UÒ´õ¾ã¾E`a"[Àc¿*ìÝXuÇ"§ˆ4Û.Í`ïÉ ¬81EÊŸ—Æh®q•‚àÄúÃÄ,nòÜú<)Uv»¿žE–=/=âZÄ–ËÞ*Á§)œ¸X÷%qãÝm<>|&¾íäÝ.=ÁPô}'¶ ïR+nJ;Ê{£D|@åŸ+eâ:qo'\¥ò¬NöÜsϸX@9òâJq-Kcm9\ߊö·¼j7¤1‹)?fÚ½vô–9y®Ryu·<8ZÜ&úîRà„{ì±q‡¯øEÿ„€B@®B`@7†ðÈ•‡…U¦Œ ¶j½úp;yœÿÄâpßO‡Ù·¼Îé­äfÞÔOOvš¸a;gvbâI<V2×_ýˆ^Ñà¶E¡n”¸! 
2ÛÎàÄY;Cˆyüp%0&b÷L›6-ZŰÊ:~üxn’*Hv>ëH{Ž92¶í™ZóX¼î5¢‰ÏO‡ÏY„ǀݺˆ;‚4Ó.ž¸!¯²Øƒ¡ƒÎÚë¢SO=5bÉö¹Ø”gÕOÜ Üc1áwØ©·¸ÝÏ;Å3Ó­ŸX¡ µˆ›<‹™ZeçšåŸwY‹òlÖ}IÜP¯”tãÜ–[nN8á„8òÝ»75ò¾óž3¦Ú; ÑxÉ%—D º‡@7é+âÆ{ÎÚ²’÷ÉÆ­t\j†¸¡î.Ç¿EYÛ¯»îº1&ïŒi#„ßLÛÅÉ~ÇÒ@ì¾ÿ¦ã›'ÒÿûßWáÇÌ+ÿZµxâ†gšå.kk¬±Fì‡W]uUu;+—ŽB@! „@÷ 0 ‰ƒyĊèՇ…׃š°ÜúfÁ¬Å‘¬™÷à¢0ÿ‰EaÑsKí–}ì$qããã\}õÕmÛŽ6]õGf‡'›¼³utê ”×@¦ä×RL,„ƒí*E~(6¸Øsï¸ãŽæÉ'ŸŒd«ãLú‘,%>^Èø—’”í¦›nŠÛÅò¬]wݵj†Ïí§œrJOò9¬fPˆQ€¤ñ4ê|%f õïTAŠæüû^e»aVhâ$œWÙ%Š ¬¬hCÔ˜»WJ&±ºí°ÜCŸ ü¸¼â¯¨*²>Èo3íÒ,öl°AìŒ$¤.FyÄøa¥a;g¶u."F¼XZ¬$þò׿Ævß|óÍÃ{ìƒs¢ë,«LP(·Új«øÒk-¬”6ÜpÃppe+{ܘL²ˆO¢oèŠ+®=ôPî®`–—ÛAÜ´ ë¾&nÀ ·P”jƺ]vÙ%ZÚØXò×J»Cš4ú¾³ã+ïs–ðÎANœ8±Ï\¥°ðƒ`2K¿™3gÆ1B™1€±÷H“t\j–¸ñ–3Œÿì@ÅûÌ;ÆŽ{íµWuü§ãÚjâÛ…w‘Xe´ï¿ ‘,w'ˆ9Æ »~Ùe—…Ûo¿=Ž'K—.c Èx3\ø b\dì´k›&fVùW&8±ÝÃbIºË£YGZ…€B@îB`P7ù kFÚpnÑÜ¥aþŒEañüÁAØ"n¶Øb‹pâ‰'ÆÇ21F©g’jé`»Lù`¼˜óûXþð‡?®‘fˆòßf›mbÌSÔ³žYk§˜¬ôž<ÀòÅܲÒúmíºWÀлD¥Êîgœ.ºè"»5®´¿ÀÈ»ñÆX(J©‚d鲎/{ÙËb,#¸²Ò A‚APya àý÷ßߟêõKˆ /¶K³ØS0âÕêXŒAJyrÅ—?ýlÄ éqi1¥,MŽÄAIc™ÀeyJ:Ö (òHqsÜqÇ‚K{¸±Õüù¬Ïí nxN;°îKâ†÷Ì,¶²pÌŠ1ÔÌûÎØB€lW홌XÂñîA8Ö"¶ížZÇF]¥È< §óÞ'Þ%ÈÐ,B¹Yâ†ç{«¾g ;ã|òɽ.yW¯^+'x_±¼a0/¼¯jiqy5Ë•W^9¦IÇjŸ–TÄ7ÃÒͤâÆo¹N>)Ényë(„€B@tƒŠ¸yÕé?îé âg–“ÓÿøLxîÁ…ÝÓ*IYâ…”?„•:”|/>°èu×]'å\?³â?Ë‚òd1â‰Ê?H&ŠJ¶)¾æÄ÷4~K¼!çŸM¬k)&fÒÎ$Û[ÜX–ÄÁý„‰´Ò³:‹Â\FaaùÎ8ÈøŽK{ï½w|yqÓŒ !»YsÖ oåd|Çâ$%>yÏ¿à‚j|-KoGÈjÞU»ûwŽñ‹ßE,î k»ÇŽÄ!b<²ß_Î{â†ï¸¸UyÍ¢‘sùßwß}÷.OÜs­lݹ1ò•Ï~á„ï! „€B ûTÄÍߣ®ÞóßË›8–Ý£Óø0©Ocwt² 7(ÇLî‰À„œ tYIÉâ· LÒùC9AiöÖHeŸ‘¦'_ÊŽ`’oDO¾Xè (—(ûŠ*X¤„Q­üè‡Ü‡ÅÉ]wÝU¸þeÚ¥ÕØcÉ`¸RßÕ-ÒN¬ÛYÇ”¸±Ø(Ô²šþŽžn¦LäfAEß¡ofÉ9gŸ‡3®BDöµ`‰²NÅíòоë-I:Q6.b¼Ðï âž’âyeà#¤ ¿ `^Tp;vlX¸paîïi Ú!ec°†k56¶°Âodz«ó/Š‡Ò ! „€Å4ÄÍúU®w¯REeömóÃÔ£«~Lú q3PÚ$¾ÿ垢ÔïrVÖõw ` z! „€èo ZâæÁsg‡ΙÝßÚ«%å5e½Û-nZRÙ.Ȥ¯ÈÛ^XQfå«!Vq‰3a1j°ÐÀ%¤•?]{,B_aß-õW9:@_7ÔÒo) yƒ«ÝôJðp,¹°Þ° ¿\;æ˜czmIÝy¤ôÄN"€UÖ~ûí­úp5’ídz,2;Y6=K! „€¨À !nR7©©Gϳo{¡>B0…ˆ›Î6j_‘Lοð…/Tã eÕòŽ`¨E]²òèæs}…}7c¢²µ¾"np}$èôúë÷t öµÅ%‰4÷ß¿?­Ïƒ¿Y€U—XEš–! „€ÝÀ  nR7©ÁlmC—qÓÙ“@–¶,»8•‰ÓlIYU}Ç;ÞØbšXc*AiçTV≙@ì›Ë/¿¼ÙGtõý}‰}W£Âµ , L‘‹/¾8\{íµm{VVÆwfkm$ÊY3}úÃaÚ´»âûž·ÍÊCçXY²ËãâÅ‹ã.R^xa5¸óÀ«­j$„€B`à!0(ˆ›ÔÚFÄÍKbO–«ÔÀ{¡U#! „€B@! „€X xâ&µ¶¡ùënRÖueqcHè(„€B@! „€B »ÐÄMi3Ø­mèŽ"nºû¥Té„€B@! 
„€B@–¸icMÜû(â¦7&:#„€B@! „€B pÄÍ*/Ö{÷*a•—î…÷`w‘2@DÜ: ! „€B@! „€èn qS‹°¡ óößiq“"¢ïB@! „€B@! º~OÜÔ#lfß6¿BÚ<Öè÷Q©DÜôðz¬B@! „€B@’ô;â¢És‡òõiãÑøßg7ÿÃBŸ„€B@! „€B@t3ý‚¸)CÖ6„ »G;í…nƾÏÊ&â¦Ï ×ƒ…€B@! „€B@”B «‰›znPiMEؤˆdq“‹Î ! „€B@! „€è6º’¸)CØY°²°)Ö½DÜÃI©„€B@! „€B@ô5]GÜlûõ53·òö@‰¬ñh”ÿ,⦽“׳„€B@! „€¥è3âæù{¯ÏßsUX0ãö°û¥ë÷*´›^´ìD£Ä „Í&›lÆŒY¾%{Ë ¤Œ„@"ð /„»ï¾[N¶-„€B@! „@>}BÜ,šõ@ÀÚYk÷GÂzï^¥G ÙÊûsf÷8§/­C âfË-· ë­·^,ÄÌcÃS£7 Vœ–ž† ÖºÂÈiîùH¥$B ¬ÅÆ&Ož\Môàƒ†;ú]„€B@! „€Ý€@Ÿ7fm³î[…uß²°"mzÀÑ–/e‰OÚÜ9dJX°úöm)WÑLEÜEJéŠ ðÒ—¾´Jàˆ¼)‚˜Ò! „€B@!ÐIú„¸yêâcbwúżuiÓ޶})CÜà5eÊ”X–ÛGï߸må*š±ˆ›¢H)]QÎ?ÿün(R¯2X¹Ð×ýôèÑaþüžnܽ ÜÏOŒ3&Z¶º­Ì÷CúP:th">õÔSá7¿ùMse¿|à#FŒˆ1Ìþñºñã#ùHXºti8õÔSìY³ Ý×W‰¶Ùf›pÐA…x üð‡?ŒÅØl³ÍÂk_ûÚ^Eúûßÿn½õÖ^çuB! ºŽ7+oµNXãµË'F#Æ c×®ÿôîDg€–ªUĹH™”w—ʲ¾iyÓ)â%bäÈ‘ášk® ¿üå/;ÖV[mµ°ýöÛ‡Í7ß<Ì›7/N2‰·²`Á‚Ž•!ïA+®¸bØpà ãå{ï½7–//mÖy,¸Þð†7„M6Ù$×ã?®½öÚpÏ=÷d%/tŽXLüMœ81Ìž=;ND™6*&nŽ:ê¨*qs衇†¹sç6Zô®¾èG?úQ´âCë‘ðêW¿:|êSŸŠØœuÖYá /ìjœZY¸—½ìeá‹_übÌò›ßüføç?ÿÙÊì[’W_öÿaÆ…÷½ï}a«­¶ “&M |_´hQxì±Çâ¸ßH_Yi¥•¢»å[lÇ$}ƹ;î¸#0f–ƶ<0¾7>ø`øþ÷¿_6‹€â~ä‘G~[¨#…Ï?ÿ|´*læ=ÜsÏ=Ã;ÞñŽ8–3¶“/cðé§Ÿþýï—.§Ýð»ßýÎ>VüþC3bùÞu×]áØc-”ÕW¾ò•°é¦›Æ´7ß|søò—¿\è¾¾Jtî¹çH„9Ì•W^?üð@[¥ò׿þ5üà?HOë»B@t)%n† F­¾J>fx„cȰÅaÑ3sÃü™óºžY¬V7FÄ`AQcÖ7 –gYc÷ä]/‚x§ˆ›Ÿýìgq’‹²sÚi§)ZSi˜PŸp aíµ×î•b&Ã×_}¯k:qÄG„á׿¿çœsN¸úê« ?~£6 Ÿþô§Ã¨Q£z݃ÆW¿úÕ8ñïu1ç+¼_úÒ—¢2’&yöÙgÉ'žØÐꨈ›ÍÖ|ÿÖ·¾Ö[o½@ÛvØa­É´@.ƒ™¸ž¯}íkwø=ïyO©w¬¼M'é+âåö{ßû^$Wò*ÁØáUOÓ>ó™Ïˆ2³îy¡B–\WÃË!ˆj DÈÇ?þñðš×¼¦š R%¼Œ¼õ­oy÷@R}ìc+Ý/_!§òÂâ 1‚…XcO<ñDÌ‚#}¹±|Ë75_ÈÔ©SDN7‹'n Êù¦ÑLˆ¸1DtB@ô:BÜ µb¶âêaÄ*“ÈÕ6 ÃV˜ÑYòü¬°èÉû¢Ù…%Ï=–.x® ÖÏKÙ—Ä Ð5KÞ Dâˈ‹ –¿‹/˜†7.Œ;¶ÚãÊ’%Õ›ø€É:¦ë7^Ê”…•^êgJÍsÏ=x[å&_V£ISDPj V]uÕ˜%ˆ‰=V7F ±¢Œ¥EYK%7EZ \šW¾ò•á裎7ýô§?혛ìÄ D0òÇ?þ±£Öƒñ¡uþõqó‹_ü"Œ?>–BåÖÛn ì&·å–[F«BÆ‹Àï~÷»ñsÖ¿wÜ1Z³Ø¸É9ùÈ#„§Ÿ~:ŽQŒsŒM&XádÉšk®N:é¤@°t/e‰ÜÐ>ÿùÏW³ÀbòÝ"_õªWE+.â.ST ð_ÿú×ÇäÔ÷¦›n =ôP$rŒäà"_|ñÅE³­¦3‚åoû[µïV/6ñÁò-Cܘ«¿É¸ѦÝ,S¦L©ºJAܤÂïù\O‹¸IÑÑw! 
„@w#ÐV↉Ï1«†‘²fìf{…Ñk¿<ùL óîº$,|òÞ°ì…§K¯üdfª“¹´‹¸©ç*å Ô y3‰›7½éMámo{[„èöÛoßþö·«p¡°ÊÊû„BÀ*l§â˜cމ¦ð<“É«‘/eˆ›ãŽ;.¬¿þú±ØW]uU`U”‚¬Á½AA*âsÿ–·¼%ì»oe·³Š<úè£ÑÄbàâf†\wÝuqu;~)øOÄMA J$ß¾„’÷ö·¿=Æ‹(q{SI;qxgœqF|ÇZánÒTcdÜÜÄͶÛn¾ð…/ÄÒ@ãFä-`ˆB\„˜7ï~÷»ãçô÷í¶Ûnñ4}›qíøCš,~gŒ;øàƒczÆrÒŸ|òÉ‘ôð7°)À¡—I#Žžyæ™Øv|/KÜ`Ùcäö_þò—£ÅžµîºëÆß{¿+Mõ×3Ï<³Z>ÆoˆoáC;êZFŒ`éâ¦L¹ûCZ7ý¡•TF! „@6m%n†ŽY9ŒZsë0þ‡V,nVË.Á‹g—<÷dxæ†3ÂÇn K_˜S3­.6‡@»ˆ›²¥j”¼iqÊ#«žsæä÷µf]¥˜ ñLÑëM\?þøèF‚’€uK*Ÿýìg«æÚ( ¸=tBX…]gu⣠”þïÿþ/@ E‰›UVY¥JDQnÊï…n\¨bA@ÕVq•W°Aù2a囕Q\Ï–Vý¾¯¤kA«ˆVj'Ož•­Z±-ò× 6Ø ¬°Â qU¾^ÿñu癬îƒåÃ?ÜC!µtµŽ(t´ù´iÓbÙÓ´XàòD,`òÕ‚žwÞyQчÏ}îsµ’W¯wb¥Ð+Õ– h9Ê7Eó´2Ø‘ww$Ê{çw¶òBi&X3í ÞE­Ãpó¡ °¡cj AY‰-…ð¾Üÿýµ’·äD}b•2æõá¼þ_¦àÇû»ŽXÛÕ,ñ蟳V3ÆÐ~ô£½0ÆMȬT°Â`\ä«'´î6”²™¼gΜY½Í[᪅e!1¡ÏÊ7X¾X<ÞH£ô=«¤¨ÕïKàfñ£ª¨|0¢sXØ•%nèkŒ[´=‹ybù–±¸ÉË+ï<íJLÆ\€[³Œ¼°œãw×÷—¼rä'/YÜä¡£óB@îF -Ä ´!C‡…Q“6 ã_õÁ0bÂò•özP,šõ`xæú‡3ÿ–-]R]Ñ©wŸ®—C UÄ'¶7åJ±Kˆä’K. ?ÿùÏ-«¸rNß¶ëvÅœ>™W6v’²¾ÇJ}º[m…B ]vDñï(D'¾XÁ/SŽ"ÄMÙ<­î(½¼Ó¸4z€a׬¼ æ Ô RÓÆîç>¬6òÜKòÚ…œö"ør–Ð/ÌŠ¯–{c;PáRST|ûA@C°‹—Yæ‘ã,xøþdùûþhœlN9å”`ýš¾ì‰]~ËøÎØá·Iú$ãJ4ïY4’Žkä QNÜŸ,á=ìü.D”‹¾Œõ yrÈ!=vƒã:†U܇ÌÐ?Ã[»¤d&Ä  Aˆí™üÆ”%nx§h$/8ý–ß>Ê ùToì%/‹™ÄçO~ò“‘”ã³~siW„ß ~ßʈ,µ,nÀžñ‚v´±’g€7±à°Þ¤Ý½X¾YÄ m © ÜG¿¢m°†}ï{ß³I|ÿêW¿Šã<¿/dôë«ö\È<ú[9hiòŽÔ‹21ÎP.êyÃ3S‡8yÞiŸ·{EÜ: ! úm!n€aØØ‰aìæ{‡q[/wÿ( ÍÜ[æM»(,™÷¿ô¢÷*]1ZMÜ4h˜—%oÊ7ŸøÄ'â(ELˆ³„ bJÞä7fC>(A^9C9`¢—%¾ (¹f¹’•6=÷á8¼â¯ˆ§!£Ò•Ó4}ú% †É¤QV€EV_! 
˜ðñ Ê‹ÒÍŽVH#Ä +½¶i^PZ½½öÚ+>£^ýPêÍÍáÇ?þqæî%¬ÀZQ°Ø¬¨4CÜ@:ø YÏÄ•ÒËÄ+®(ÌFÚu;Òæµ`vž80ž(²kv¤ý°äòýÀv¨AY'‘OÜ@lì·ß~þrÏ(¯3)9D"ÚÁveÁý$µJðyS7,•¼xâÆ§õiìsV9ê7äÉó  j ;ë|ýë_ï‘„w÷Ù+b=T¾ xbiá%›`¯¶cŒ¿fŸÓ¾eç9þö·¿ÏÄÊ”RÁå…ÝÞÆ4âáÖFµÖf•Ï÷OÜ€Lž<9„´lËŒ`qwzeLS±¶É”[ú5¿wyJlÖ}v²Æ”pÆ-oÉÁeL^™‡ˆc\gœ¤ü<—ñË4b€½‘Ø6VbÀòç<ØÛwÊÓqã‰'bÒx«D«#Gs§blxç;ßé/e~†|cÌá÷´Ö.OÖßÓÜ\FŒ`É#n ê CjyYD”å›7#5Öfäm»b±`1‡¤Þ¯ýëHd3¾bõ•W°¢ÏÛ­ŒÐÿó~ȇ>NYù6ñÁ”÷ß;]=Џ©B¡B@~‡@Ûˆ›6+ïtd%¾ÍF¥@YøÔ½a姆E³ÚoÊ]ª`(q«ˆ !® –7ÍXÝOò¦â†g (]tQ4©Þyçã™6ÙbBê·‘Î"n˜”[¬¬p1aÅÎÜœ =.½ôÒÀÄ“I&“v´± qCl„ïT”ˆ¡•Õ7¬6 £#fp:¼bÕ‚Â`„• <+|yÄi!nP¼QÁ$oE—@™LlVtï¾ûîø9ë–4¶’Ìd;%¸‡ú°r¤äZ»Úžô1È”"ˆy[[§J³áAݰÔÃ+&Úê]ïzW¼þƒ‚o®œäK;[€[Ú…ñgúôéÑå‡{Íê2.‹¨4¼½õ å@Œàsž ×RIÛüi;ú!åÞm·]+äù”êmY¦sÒ÷#nÀBÄ”V,x×Éá:îæÊ„BLiˆ#Þ7û#}^¹–%XD¡#ÔÑ ¿ 4¼#$m±–æ}šö!òA"!=ÖUœ‡äð¿%1ûgmA`Ä»œùËM~{êÕÝ[Ð@Üxr7+c+K1âÓ›5 m“gÕäÓûÏF°ä7ô –ÏoÅ©§ž-†x×!Yl¼LÝ´,_?Žn³Í6q!€>VXú8kEˆ+;c7cãã/ýЬñø²¸I–¾ÖÑבñ,˜ŸÐ¿±èeqI 77µPÕ5! „@ÿF mÄÍÈ—l&¾ñÄÀàedÙâá©ËŽ ¿³ÌmJ[›Ðf)¼>ž:éºÇgs—âd§È›F‰V0Q\½b‰bŒ “6&ÅÞ_?%n°òÀÚA±þýï?Û?s÷aò‡r’’˜<yS”¸¡\L¬Í e‚Ig#ÂVµ¦¸§.KžtbÂoJsúœFˆ\Y‹tò2L®!×òES|$Ï‚‡k(ªXÕ"¢H—J£Ä­¾’ Ûáz€1k,𥿠^qEÉÁrÅǸƒ„¢/`Ñ‚rkb«ö|‡8á^úŸ ýج4üŠqJÜàr†Ò‘ŠÕ‹|±ž¹ùæ›{$±ÀÜ̪·)pyJ§WüQBXÏr»j´µˆ›Fó$`¸‘F¾xP Wmgêo #Ä ï‚mï 7#o xPÖp\çý5!OQÆÕŽxRd‚¯ða¥–º´ù´þ³o?ÎC@@DxAqßgŸ}â©ÔâÇ÷ú6}œqòÁЇ1Ú ¶[J-ýÞ[¨Ö÷KúnYåó´Ï*´‘‘¡P^Œ¼ðÄqºèŠ;ïĸ›’í‰ÒA>ñ{ü¯ý«Jq>{^Þ;”¦çû9bk$ð©eããýàz”·ÓybUa‰¤.^ñ¤ûW(tI{}´÷%¸±1%%-Ȭqñ…ÃŒM,_#nØ ·gÆVÞêŸþ¾%n³±do$úˆ-ñÛjï´¥É:B¸ãÑÔv@IDAT™Ë4åbC´Òïˆ.”)ÄcÑ€…õÄ+c(á^9´{ÍÕ‰>1ã jÒx=+Ø.iPdéCeâqøöÃw¾,ÉSä}ÿ‡¸ ·±Öcçó„4³VÄ2aËîTspID)Oß›4­ÿî]¤², ¡m÷8ZýRr ‹í“7F¦Õs'j„¸±÷.%Ê|]ùL2¿ž‹¿“±º°ŠÊïæq¡XTŒ`ɰT¢}iWú5ãi±|!gÀÔm GèwQ©%n¼K§ÏÓª€õÄïÌEÎÚ©Œ~h.—Xâ¹èÇ kWÿ<æ NìÑg! „@ÿA mÄÍÈI›‡ o8! ™ížÑ҅χY—ŸΜ–—Dç›D ÕÄ Åiyc.S+m{`WùË’F‰&û©`Ydñ; Jˆ‹€qãÓg‘'\'vŽMÈ®¾úê1Lüý¶Râ†Õ;L¢â;0ÉO‹Ü=òåÐ+Ëf­’ºKY]qa0…3+ÏFˆVωŸÂÄE'K&W,ŸL!`55µVñ÷ø:CV¤––ÖêÄ$ÝÜìZ­c#Äw“ŠÃ&Ƶžc×¼âš7ù”&Ä»3 h±ª[kUÅ•òpôÖž¸Áõ …5•Ýwß=*œÿÏþ·WOÓðÝ‚w¦î^IÈ#´¼âïã…øç4SŽ<⦙<}ÙøŒKADZҀâh1Pn»í¶ï.Š–Pãn(žd ^ü‡Œ¹\˜  ¿Îgˆ ³ì ¿¤$5ã‰Å‘B¹K-§ÒüŠ~÷íÇ{ 9œ%§(±VUÙ÷ˆ‹+SË Å“?f>­}†P‚pGøÍ΋OdéýÑc#âÆ#£ÏB@þ@[ˆ~ÄFN¨lUºãè—,7Í. 
Õ‚ÇïÏ\÷“°°²Ã”ý¨½WéŠ!Ðâ†'7KÞ!mxN#ÄM­ø*L,n V¶ë)þ<Ó’dÖ¬Yv*±ÈA™@؆I|–`¾¢]¸ñÁzy„Rº‚Nþ(Ìæ’‘õ¼”¸¡íÍjâ´Š‚õÏŠ‰¾w“ÂRdÆŒYYÅs7¸à¢Q« KXmG ¨ ªò„¸,Å!kµÕ[7¡LB•Fˆ¯|²r ÉQTü½`à×+”FÜxB3úW!v”¸áï-«áXËe)ŸX@“§_±ç^ÿˆï&`My¸ŽÒšî Eß²¸e·X¶B|ûÕŠáá`ïþáÛ!-–*žÄ²ël/ÍX„‘i×üÑàzä…w‚ ‚lfŒÎÜ4!ÞÒ<-îJVL#^RâÆî´¼É»?uÃÊKÏyOÈÐFyVIf“Ö'/o#døm1=+mQ‚'ë^#Xòˆîá½Àªßo¬Ýø]õÖ«¸¦b]cbùÚw;B–c‘›eñæûmj‘dVMõÜÑÊ7ævW´M¬EÜx4ôY!0°hqL#*;JÅ¡aÔšËM>ëA·à±[Ã37œ=ÙÛϸ޽º^Sô[œ8ë©eÉ›2¤ Ïk”¸AY¹çž{z™˜ ì,Á$ɯšñ¡c–#d@>f¹bšK”‘tµÖožGÜø §(Á6yŠš=·ì‘c…¢ˆ3Ye’ÈŠqVÀå4ïFˆ¶1gEù¿ÿû¿pá…¦Ù1Øz1v¸‘ º¹‹äÅb÷ÛuR%+öK¯B¼x¢âƻݠ°ÔÚ!&}®W\Ë7äc–µVrÁ«ž«TžÅÍž{îYÝÅÆbMëPë»)pÞM˧÷ŠqÓL9òˆ›fòôJ1¤. wgð„UqãëOÌ)vUã½$(© J½)šÖÎŒ ewè±üp™4·J>ç–¾èÑ·_-W)Ʋí·ß>fë]S|ÿ§2¶â¾ƒ0&cÁÆ^¼uLºK•¥cLcÇ.ú-˜€á6î3&BdCœå‰'Ònã3}7/òÀúÍ,8Óùí@ü{º÷Þ{Wƒ«ã’E,®;Òç·ÝvÛø•ß²zs5»£ˆ†> ! m%n†Ž[!o6®XÝTÙ|“šÈ-|òž€µÍ¢ÊqéÂy5Óêbs´›¸¡tEÉ›²¤ y7JÜ»åÁ X|¥ï­¶S‹#n0ÙÇjƒÉ!W±VAR—(OÌ`)ËwÙÜû›g7L;6BÜø@·y ù³B FÄ÷0‹¯¸–%n, -yç¹9 NœGÜx¬VÌžŸ'¶*Ù+þyÄM3åÈ#nšÉÓ”]ÞÊŸZÄa=fäC=âÆãæ•y¯t¢`bWKYöùd}öq3jY©dÝ[ëœo?Æ5SËߗ纓Õÿ=ɶXqyBO†`Yqx…€N-Jü6×yŠ5n¦æ& árÜqÇõ"‰|ì³YP@ж·íž!3lKivÀb,2±Ý!ϱ‚Pá=®%7~«û<·&,°sz’c¢Ê?«èª«®ÊŒ‰f±{¸‡ÀºXX•#XŠ7>oÜe¤±|íÝ•Œß&³œã7Ý,Ï"Ä ióâ“ùàÄÞÇÅ«žÐ7Œ …|¤ýSÁjxoCÄ÷™:ujL"â&EJß…€¶7À4tÔ¸0|åuÂØ÷cÖß9 1ºzËÍ/<ð·0ïž«Ââ9ÓÃÒs{\×—Ö#Ð â†R×#o!mÈ·Qâ†{Qð™A°ú‹‰<…¬.¹ä’ø™)qÃ9Vç l¢Çê£ß:ËÛ…ô†BPP¸7%nˆÃÀ½6ÁçÙ?YB<“4ÆNVºZçN8á„jPÒ±X+ФåU‹¸A ²•ë”Ô2«&òØÀ: òfrÅ¥‰zc¡€ \NŸ>=~æŸ40>FÞ`¡ð| ¦ƒÜ¢>`Âd–<†Œ@ŽdíÈ/æük„¸!+#(øLûë ¤ Jâ•ù,ÅÕßÇç¬7œoÕvàyÄ Ï@q ÏÒ!ÉÒ ¶ô ”dbûÐ^,°.íÄ+þyÄ ù5ZŽ<⦙<=qCŸ÷ï*}™@å(…ˆokꊥOÌpvE™½üòËcûÇØHûÖ[o©¸c/ù¾“·jÏ{d¬³¶T‡¬E©Í#1xwˆ§¥ã 銈o?Òƒ)Èv¿/$¯ÿûí¼±Fáý7ÁšKAÛ²Ò2£ÀBTâBCÀh“¬:û­ ¹ÎøWÔÅ‘q‰~Ïs±l1¸ÀI2€ßHm/¸¡|ó»PKj7yíF?„`+$ üÍû ¡b»ú-¥Iõã„âüf›mVÝ% Ü(¿'ÕÀÝâ½qqÚ»+‘G=±þ“EÜð|,RÈ ,R1¼R+"Ë×»mbÙbù‘OŠSQâñ}•`ùžÄ%¿†1}ò¦ž0GƒPBÀr1µ3 /Òøß7 "B@ LÚNÜÛa#°WÃÆN#&lãßpž86‹fÝ–Ì›–<÷DX¶d§%mF SÄ ÕÈ#o%mȳ↉1X¿d S¶•õÂdˆI¯YÜØµÍ7ß<îBÉBž¬´™›“a¾¯¿þúÕ 3÷±{öÙglj0+ý)qóM ±çä§VVhP`KѼû9¿C…´:âEw¾ãÃn(õ¤qãI«Ôº «&ÄF’1ßlgzÖYgU‹À.:;5I·sgå—xˆå‡²bäV?LȹVF%nüŠ;ϼá†bÜ &íøÅeÌÊ‘ÅÖÉHžâêËœGܘõ”õ”nbe°ã Ï£Lví‘ !öñŠ`R$81iýê/ý+-”9”ÎvØ!ºc˜r˜*ÜO»ó> YVA^ñ¯EÜ4ZŽZÄM£yz‹¬°¢@ésv¨ÃËÄ7º\}72ú=Š­ ·§N:é¤*)L;c±b;KAÞB¶AÓŽTvíY»û I•Õx²ò/O™µë(—ï 
ć)"¾ýHO½°"¡^ô‹]+»0™Û×yÙ6Þ$¯ÿ§J/;ÞáeB !ÊÌ;Ÿ%”27+@/퀓½¼/µÜ£h[ïjäÉ È³º¡¼ë¸N2Vò>â>܃ç! yã[ÞÚ.+ÆM­vóÛ¦3Þò<°ç7ˆxS”IûLJú0¶{w5OªÑwyoùMc‡¬c*¿c*„Òˆ›÷Á’EÜð‹+ª-žP ( Œ­¶»¸Ûn>_OÜpÞ÷aÆ8ò0÷¤¢Ä ù@ä1þB(áªK?°±ò?úeñïdX@úPGI8"©U‘ˆ›"è*B "ÐâÆ 2tX`›p/lû½liíÕ&Ÿ^Ÿ›G “Ä ¥MÉ›…3nÏNýU¨µåw­Z6BÜ`±Áä’ØVžÁ$ŠI9«W鄊]&‡¶3Ž•‹Éþô“i⨤ÂDÉ[€ò &„fA’7L„Y9-"y[’¹×§±úa Á¿ˆ $21D Y˜› œyä‘ñ+ŠÆe—]f—âk%&£6©¶‹àŽr“å••_,R ”.žG|"SÀ,O,?P½K–]«wl”¸!_ä4ï9©Rì'è‡zhÕkË#¸á:¸¢¼Ùª®Ýã(¸î ¼™%nHϪ¹mom÷§Gúe÷‰4Þ#‹ØñJS-↼)G-â¦Ñ<Ù% Ë¢´ß‘‚ÒgãŒ'n¸fnO|Γ,w»zDy1Æ@hÚVÛ–¿wÉÛ­ÉÈî)ê6CZß~”Û[rÝ pÄëñR«ÿcYc8ŸqÆᢋ.ªÞγ¨¯¹¯Ú,@°†Ä=•q˜1WVÛåɾ×;2žx« Ò[ÌòÆÕÉ\UêåÕÈuk›,âÆ®‘oÚnàÆ¢€”>Ÿ>Cð;eâ FÎ¥îNôE‘²cŽå_ïX‹¸áÞ"ã,}€ØBž³|Sâ†<=á‘ÉÌxY„¸a|eQ"D„\IÝýxf=ÁúŽw Oh;æ.¼Ó&¾Þ=Û®C|a]Œ@1÷! „@ÿ@ £Ä ¬¶Ï·ÂÒùÏDt†ŽžüãgúR¨”¦PØŠR^Õl’©“ŽÈKRø¼'o¸©QÒ†{%nl·V´‰A<—‡*1h uZ!(sLö<¡ayC±7®SX-`Ñ3ÐÄ,^jaÊ*/VKX`u‚µÖy¡ÅD:OPÚp@9e’Ž¢ŒÅO£Ò qÃ3 ØÉ»ã•”;êY†µŠ” bv |p¯-l9ÖHV|³£>ó™ªÕELXùÇsqÄÅÉ»3p}Ò¤IÕ-ˆk•µ¼p "VSªœà"p~E°˜=–ÞŽ¼oã‚¶ÆZÅ õ±]ÝŠìÈU¶X¡˜"©âoå(›'÷¡Paµ`+ê–×í·ßÉ«s–Õ$)–ÄiAIÆ'K~‹Tù9mÚ´è^aAqýýôI#.òÈ1só¡Ï@lxeÐç•~öÄ y3Bj›éQ0±DÀj(•zýÂÐÜ")åLëäà ÉbÖl¦ørŸÕŸç—%n Þé›^x$”½ç¸½¥¿OßÌgsĺ%%Ùëµå<æ˜ÏUÆÈ)½Š@~`á­i,‘YÕä‘¿ŒKi_ä~<È,ïBhù9Á’eqc÷ãê† ‘Y¡ÙyÈzÞ9?s­µk–/V©k0ýÕ»ßáêãN¥nTæ‹5 c,ãLJ’ñ[DYR2ÕÊTëH™ Ó8Iôg¬|Yذ¾nùØ;Èw7†ŠŽB@@lj›µÿßj>ú³½’ý¨E‰&,¬ØÝ´Úák©V.R#ר*Œªü5*Í7>·Þ}(G(¶q@˜ø2ÁBXÁÄLžÉ.¤–É­¤û€¸¡ÝüÊ~#¥ä=£½QÆYáM•ˆFò¬w$ÉäJÜ Ü£ ± &½•M½û‹\GQcû]”q‚|H»žX`Ú¼87õîϺÞH9²òñçÉÅ—=ð@.ƒ7÷|›þáPô^܈ˆ%‚ÒŠõÊ›5¾>öK'â>ÕßþCš¢å ÿ”¸AáEP`ùCqEimeÿ§Þ(î1¼µH<ùâ?ÛºX,­ˆ4È;ÛúÜp©Â2™ -¤2d53–@­–"íF “À¢£–@JÖ#_øm#¶°Äo}²1‚¥qcùcAB øR‚ÚÒµã艋ÁD#ÞiHbéaÓ¬ í¸IÒçYÜ4ŠœîB@ô="nú¾ :^‚¢ÄÍ{TJW&òSÇU\‚V˜Ôñræ=°[‰V6 Ê$ A‘ÂÊ‚‰œùãs%‹É¾¤ûÀíå k,vL‘´¿õvã¤5OP.y`‘fÖ}X{á®ÙJÉ#nZùŒ4/H¬[XX`œÅU5UfýÎT¸»bYÒ.!è6VcæÒ•÷Êê-òÒ öóeˆ›¾Ä*‹¸éËòy¶ˆ›"()B ;qÓíÒÖR%n®ÇÊúC¦„«oßÖ2•ɼ[‰êÀª6® ¬nf 6!w$݉+èX¬ؘ˜-’Ö!`V¬ÊãÞ!é æf‡5®xe¬iŠ”°/ˆÊ…Õ"– „»U±#:ï°íàÄ5bDµÛ‚ŒmàqÙª…ï|+&ʇ•K;,n"èŸ7¸ra‰†`¹åƒ×wCuû qƒE}\dWFD1n" ú'„€è7ˆ¸é7MÕº‚%nÖ©ìR‚9úÌcãëÔº4™Sâ×Ìõqç¨gÞd±zÜá…ûâ*óq£ÀBÒ½X|b$ømÉ»·Äý§d>à)M³â@õŸÚô’2öQÀ–Ôì¾Ójé+â7žãŽ;®ºcYV½ˆÏBšZ;Feݧs}‹€7¾éîIþZ_}î/Ä –±zLEÄMŠˆ¾ ! 
º7ÝÝ>m)]Qↇ›»T7YÝ”!nÚ 2˜µÜ¤Ú×¼Äyb—øž{î¹í{rްûô8Ç—K/½4\sÍ5½Îë„B@t'"nº³]ÚZª2ÄYÝP ÛGï߸­e+’¹ˆ›"()MŒ´áYÛ”ANi…€B@! „€h7"nÚpæ_†¸¡ølïÊ&H7XÞˆ¸‰M¡-BÀ“6ì~ÓI—ºUAÙ! „€B@!0€q3€7¯je‰òñäͼ“ÂC‹× Vœ–žØ²­ÂóÊ›žq“"¢ïe`÷(¶Ì&€(GD¤MY•^! „€B@N â¦(wÙ3!n¨nSìâÁá!0P ¦ £Œx ´¨ê!„€B@! "nV{ªM£Äe3iÒ¤€Õ[®²½¤Dôؘ¾æÌ™©Š°é/-§r ! „€B@Á‰€ˆ›AØîÍ7ƒ2UY! „€B@! „@Ÿ â¦O`ïÛ‡Š¸é[üõt! „€B@! „€EqS©”NÄÍjLUE! „€B@! 4"ntófWNÄM6.:+„€B@! „€B ÛqÓm-Òòˆ¸éÈz„B@! „€B@ ⦠ö·,DÜô·Sy…€B@! „€B`°" âf¶¼ˆ›AØèª²B@! „€B@ôKDÜôËfk®Ð"nšÃOw ! „€B@! „€è"n:…t=§YâfuÖ “&M +¯¼r=zt2dHÕNE½X¶lY˜?~˜3gN˜9sf˜>}zïD:#„€B@! „€èBDÜta£´»H76›l²I3fL»‹¨ü…@[xá…ÂÝwß-§­(+s! „€B@! Z€ˆ›V ØÏòh„¸ÙrË-Ãzë­k:sÁØðÔè‚'‡e£'†!C‡u¹çØÑçéa,ÄÆ&Ož\­Ðƒ>î¸ãŽêw}B@! „€B@t"nº­E:Pž²Ä'mî2%,X}û”2ÿ"nò±Ñ•b¼ô¥/­8"oŠa¦TB@! „€B@ô "nú÷>}jâ÷¨)S¦ÄòÞ>z·°hüÆ}Zv.â¦Ï›`@À“7·Ür‹Ü¦D«ªB@! „€B`à! âfàµiÝ•!nöØcÓ¦,m¬b"n ›EÀÈbÞ\uÕUÍf§û…€B@! „€-G@ÄMË!íþ ‹7fm3oĤp÷ªûvMÅDÜtMS ˆ‚`QFìYÝ ˆæT%„€B@! „À€C@ÄÍ€kÒú*JÜl·Ýva5ÖE­m̸=óá£ÖØ*ó|£'EÜ4ŠœîËBÀ¬nf̘n¼ñƬ$:'„€B@! „€è3DÜôô}÷à¢Ä¹IMW±¶YaRn!lžºø˜Üë\XiÛøÊ_+DÄM+PT†»Mm½õÖAîR†ˆŽB@! „€B@t"nº©5:T–¢ÄÍÞ{ï† nZíðÜ-¿çNýUx¶ò‡`Y“’3 +¤Ž¿>ñM'7]K7MC¨ vÙe—°lÙ²pÑE%WôU! 
„€B@!зˆ¸é[üûäéE‰›7¿ùͱ|S'‘[ÎG¶w¼¶Öáµ^#x wš%oDÜä6‡.4ˆÄ ò§?ý©ÁÊݶÏ>ûbH=ñÄá‚ .(ws?K}È!‡„qãÆ…¿þõ¯áÎ;ïìHé_þò—‡vØ!>ë'?ùIX²dIGžÛ ±¾uë­·†¿ÿýïÝP¤^e°2†þß«ò81bĈ°téÒ–÷û1cÆDËÄVTáCúP:th¬žzê©ð›ßü¦Ç¹²_>ðêO̲üã…n§ýèGã˜|æ™g†;Ð}}•h›m¶ tPxàÂøÃXŒÍ6Û,¼öµ¯íU$ÆÆ‰B@ô:NÜŒßñƒaéüg"rCGÏ\÷“hÕÁj·¤3´Š¸1)sƒòîRYÖ7­"o:EÜ ð92\sÍ5á—¿üegçŧl¸á†aÅW Ï>ûlœœuôáÉÚ- 1d˜P/ w$&›_|qX¼xqò¤b_±{ÃÞ6Ùd“ˆÑã?®½öÚpÏ=÷Ë #U§‰›Ÿÿüç-ð`>P…vB‘B>ó™Ï„|°#U=ꨣªÄÍ¡‡æÎÛ‘çvÃCÀÜ!«Þ÷¾÷ueÝûºÿ£ØêSŸ ö[øýï¿Tßäþ-·Ü2lµÕVaÝu×3Ñ¿o¸á†h½W¦/OîÀŒs!ò XLÒæÇ?þq˜:ujz©æ÷6Ú(â3jÔ¨^éî»ï¾ðÕ¯~µ´ÒDF"nzÁÙô ”Û³Ï>;®~ÓÎ_ùÊWšÎ³hƒ™¸ÁzàœsÎ ¼#X|ñ‹_, [ÇÒõ%qá{ÜqǬGL¾ýío²Îx×»Þø]Ì,/3pG¡Æ¢¨–@Büã¯yÍkªÉfÏž•ðê‰O>ùäHüd%‡a,Æ¢¥ŒxB#ë¾Ç{,æÛÈ›,׆ǯ}íkY*|Îò-Cܼå-o X"ý¸ùÑ~®¾úêØ‡h/L7†ˆŽB@þ@Gˆ›a+¬†{ITæG®¶q6vbDnɼ§ÂÂ'ï Xn,™ûxXòüìþh?¨A_7ÀÓ,y3P‰ÈV½RÐWÄM+Ê‚)ü+_ùÊøF0±Ÿ9sf\%ÄÒáD „KaÅ—ô¶šüÜsÏE‹¤I“&E‚<î½÷Þ˜¦H~>ˆFk>£î¼óÎ1³Ã;,¶Ukr®ŸË`&n@‚ámo{[ê _øBøÏþS´¦è+âæ=ïyO$^ L¼Ô#nÖ^{íHöLœ¸|î½/THõÇ*;Ñ=úè£aüø•„ ÃZk­U%Oß ./¼ðBÿ¨êç5×\3œtÒIÑò®z²ò¡,qƒe&xÚ¸øÌ3Ï„¿ýíoÑZ”qÍ,3/±$)J²ànøùϾZ4, `±œ|Õ«^U­'n8'žxb5]ÑF°PÖï}ï{Eo«›Îò-CÜ@vbUDûuÖYsé¬[™œS¦L©ºJAܤB}ÌWÄMŠŽ¾ ! 
ú/m'n†¯T!lÖÜ&ŒÝbÍò€®E³ÿæÝyQXðØÍañ³µ­@Ò{õ½<í"nê¹Jù’6CÞ DâæàƒîáŸÎª-fé}AÜ´¢,(%faÁjï1ÇúÀë^÷ºª9ü¬Y³¢ ïyŸY%_ýõãå«®º*`.ŽŒ;6’5+­´RüþÝï~·´O¿ˆ›]Ëþ¡«‚>Ì*:$^'e°7+¬°BT@iÈÌÏ}îs„¿î³:MÜ0601.™@¬²Ê*ñk-âkHH#{þýïÒ㆔ Vf{íµWߌH¹ì²ËÂé§ŸÞ#)ÿ!Q,OÈÊÈ÷²Ä dÿ«_ýê˜ú®Qž3Î8£j]tÞyç…ßþö·=Ê’÷…2ã~…üå/‰îV–÷00°òƒÏ#¼ýöÛ÷è'àA?'¸{*¾ÿ§Á¹!~ ¯ÍÅË-—ôX¬x<À‘¾€[ÒOúÓø¸?ÿùÏÕwŒ¸`kÈ›÷¾÷½u‰âA¦’?}0%ˆ£!ÿ!DR¡|”…qç|ØâKAêi;S†¸ü&VR˲ʬg¨‡¹ÐYY²Ž}, )þ–žâøÝdÌâ÷£ì˜kK-‹0Ú€wßÈ)žÏXuýõ׬+©“Ë7‹¸Áu—±š2s?¸AzãֆжHÀ÷_ýêWÑ}™ßpÞ££>º×X¡Í»˜öî/"ô ú´w“¦|XbÑvéïÝ 'œcçµ§ˆ›"¨+B ÿ!Ð6âfø¸IaÅ­ö c7_¾¢UšyÓ.ÏÝþû°xîÌ¢·(]IZMÜ4h˜¢—%oÊ7([o½uÜÁˆÉ3“å,aò˜’7yÄÍñÇ_%mXEõ;N1)¶UÜô9(ÔV”-#.˜œbŽ饗Vo1¢‹v”@BPdÓÀ”(H¬ô"×]w]œ ¶ª,?¯vZQ o¼ñƪòäþ1!ÅÀO”]’êGï¾%p@tSà„]©uo£Ä JŠ“)™Yy£3Ù÷¦¸Ò('¸g Ö3ek_/"‘Fôøkö"Ð í áÈ„Ÿra½b’7–Ö®§Gb^°ST–Ø®'Ô ¢#Ë»^9P òZ ß?üá1¯F=âÆžïïñŸóêVï>êCÛúw%Š]}°˜ÉÆ\^ c¼@Šì·ß~þTÏXI}ðƒÌ\ñÇ ’Éê œ·v¢ÜyõÖsb8p–CXBd ×Q2Óí•­ÿ§»ªášáhí © Éaâ ;çXºm±Åñ”'èóœ¿è¢‹ªÉë76Ø™]ë LPøù]1òò†ßÞU„vãÝæ¹`…Øî?|†|úãÿ؃,k„¸!†”ã¿þõ¯«qMx†—O~ò“Õß‚"nMŒWŸìˆ#ŽèõÛay!”÷ž[º¬£,¾|:H1ÈOføë|Î"Œ,ß”¸Á:ˆv3Ò†¼deœ9³bY…@’°èa®´3„O^yho~w²H<Ë+ëH_ðA…³Ò¤c’Y´î¿ÿþ½nqÓ B@ ÚFÜŒ˜¸a´¶É‹k“‡ñn°ºYôT±€¥yùè|>­"nxqm°¼iÆê†|Ê77<A1aò޹5“^¶Ï´‰Ê– &YijrÅWD7KÏD×by ˆ¡¼1)eúú׿¾:Ñ"½'nì~lqƒ‡M@qUÂeÉ FÌèV!Ÿ|òI¹ú¹lY !ÌÚãOúSøýï_ÍË0« …Z‚ »Á€kÞÊ.Ö Lœ,X]/*7LæQÆÌâb œQR!Ì(2÷ßÄØÊcŠ«}ÇëòË/Û ³50$”­0Ón>À)Ê „Œ]GYE±¤ýèÓX'Y|Tñ5…Ûž‹’ñ¯ý+’gœ#Ä+zXNÐ×±Ø (&[M áwÊ)§Ä{슑YPAD˜2k×9)ʤ˜„%/ÊQ@±¢0 °\𖵈›FëF»˜ ý¾ÍVËàŽë‚ç­+vÛm·ªk+è”(•`IaÚ¹õÖ[*A^OŠŸùç•rH5ê‡5H´G„~Ǹ“’{à@{¤±O¸òZ*¾ÓÇÒ1‚óY’¶Š;e£ŒÔ‡¶12‡k”Å×Äú¿'n°übܰݞ7,Ž÷aafî|Ç­çчy±ø±þÀõŒe îmôË©ª®éïëw¯•twÕ©S§žSU_·öÞgà 4<1°à­·Y*Üàá@˜–%H˜ÀÁÑ©pàó¾tC¸¡ °ð¬áØíÍ,Ë1Þ ó€Èð½¼ö_ÿÛq•õþá!Ùë ZòÌD2eÊ”80Âk ÈxÏC3^TYfa^¬ãáÝ¿]Ï*ï—5#ÜxoÞ¸#(ùs‹(^\<œ„zQ³+ßýƒßXvœéTÚ~Мºõ³-obé3ÎiöË ÍÚå·e0O?¥ƒ-8šðÆ •¾áü1c€M¿š·ðü0óáYÂåÊ´ƒP ÚŽ‘»‡k׃&ü¼ûÜyÂM+ÇÆ 1¦œfÞá3‘‰²$ÞÆGCtÄÈ5Âìjfˆ+ˆ«œÛi¢n{ÓÏþXÒP'úˆãÁòãUÂ?FW¼R>¶ïqÀӷÍÊd}úþ£} ²ÓœTÞ–züØùo âçwm*mÅO±¸íßóMIÊxaßy‚ë°z b â˜ÏßâÅ#„RòØ0XçAh3ó ¢Õ;ßùÎxí#´äY3 UxVaìÇ{õùýøœS$vçú-2;oR2ÝÆ'FÆ;)õ¬JËûßÖ¿yýd!JÜŸR¯=Î{þ~ ”¥^V¯ÝWH Ýä~ˆ¸F;Yç­¬pÃùJÂÅ  ñÈ^!zqLeÌ®qÊf]Ãþ½óÎ;ã}²n  þ#Ð1ᦕðóâè¿îèηS¸¡ÅoŠD f…[Y– q‹_Qy¨ûØ+yT8./Ü ~ËXŒ92¾Yeàœ^¸–c­7< 2àË3Ñ ¦0òíX¨ƒY 
—òaR3gάñèHëmT¸Á»OŒAk: ³úMÈ¢­<  Â'$™×CZöôÆQ––M7#ÜøyÞ ‰Á$^]ØE]=jønW¾“÷(+Ñ® äÒpl3ñO€,c¾Ûn»ÅU î,é°pÓ~àˆÑV ¿Iw€ ¼¥ëýoò¬½öÚÑs¡#Ë6¬xOYN Z×]w]V±ÌeÍ7´Ç 7{î¹gô«wŸhF¸Á[‹Dï˜ßg\àþ#¿Ǒԗþ+2ó)‡Ùž¿Ó¦M‹Ue‰_Eû0%¯Ÿ¼8Á}ŠpSþ–pí˜çbVýV/‚7âˆý­DÔ#wU–•nhC^ؘJHPì=w³öÇ2&oZòÊûåž„OFßE@D`pèŒpSù7b↕iÀ£&¿<]dYŒ‹»­2-øÃÒ¹‡Ê_ܲ›©\:!ܰûVÅ›2¢ ûiF¸)ÊB’I KAä°²„Ú„gXíÁ¼KÑ,Ix00Hé¤pC»ýÛcËËãÃŒÎ8ãŒ8àäAÖè_>’UÿoÔãÆ{¾0Û³’d™±ÀÛ„u‘ÊEH@Q?"¾xà±¼L®L[]Önüƒ:íÏ“òöoW É*‡Ç‚€n|¨ylt{Î1{;ËfÞc6à.„˜—FZgÞï4‘/ƒó¸Êrÿ/Ó[ dlЕ·o¿ÜçDÉnZ=6<óðfâz/j›nh#â÷™Ô.ƒ™2Ý'&öyxÒm²~çysø$Y¼¬ºÊ,³óˆ²Ü³ qÖrÿ*hynìüO·AüÆc!5ÄD¬(Óˬî"áÆ{<ùvÛ5Éý; =E8±$³^DÁ£!Ž04›–ÛÚà?›nhƒ…h h$Ì7‘1ÖÄ ¿ÿ¿ƒ„ýÝ'(ïCµ<'_WÞwkC^?‘˜¿–CÌ׃§Â'Ìáõfõúe|OÃýú2 ?¡ßÖ¾›‡P=ÎÊû{ÿhËš„›²¤TND@Î7FxÚŒ™ºK˜ðÖC"öÂÿž4a!ÏÛ!o_,·k§„?½m£ÂµvÒ¾<#†õx[øPÿ&9M¬luÙÀ?o`è…1¶Ék×çëÄY¨†!æÍ›7/cK=g¨{b"×b™‰9©pCYrmpŸ!äҒزƒa_\ ˜o?ûð×s,ðÊôQˆ{Ìþ“-I‚ó8§åËþ¶þ£|‘pã˜~`ïûÁï“v"Ò¦Ó*“ð˜}by¡c¬#q­y;ä ”Ê„Ö›âÎsÆr¾!Îx3Ö,ó‰gi®*¿-ßmYÞò¦3Ìü \xJdå´ðmÚa‡ªo«óròØ>äf b}}é÷F…¶ŸñJ(Ož·CºÿÛ®Y@+go÷½ÇM;C¥òÜþØáY‚ù¨µ«Þ'…Muïôüv6ð/ðe…Šù:о{áà 7­›Ïa‚×mFF.#Ä ,K¸ñíE|Å o aý›z_W:kޝ§ÞwóœàžäC•êmWo½õåŠvž#>ù@[ÎöÜG ÛäǸ&àèÙ²Üò®¤9ŸXgæ=PŠÊ×n¬^!6¡O¢á§®Wl mª(Çòzž.Í7ˆ€CXÑñšàD¹2³?!|ÛlUy¹°¨‹c"çš¿G±¼Œ™ÀRÔn_ ¹/½ï}ï äÑ2ÖiȨÕ˶äãØÍkNjؾî2ÂMÑ½Ê %Y‰Ôý¾ì»‡äÜÆë®¬I¸)KJåD@D`pè¨p3døjaÄš¯ «¿aï0jƒ É-zä–0ï/?KŸV.[\XV+[#Ðiá†Ö•om¨»Yá†Y@,÷ õ`°8ñ„ÂÐaâÐâg®1á†Ð)ËC"Vâæ±4$Ê 3xå $®Cè {ý›±n 7„w(wo@y½½’˜öÛ%’(Ön„’ô˜i£ÍlüÎ —òÉHq÷Éyá‹@ažOÔg €„ò@ΧS§ZB]¦Ý¥ÍX3 !fäŽÀò “‘ŒAod-9´ nØ— –ŠòOÔKN\4á<µé¬9‡É)шqMÑW9)²<®là_ÔlR†Ð«¬7åyíÊnš=6B£,,>Ì«­°zµ›óâÂÊtï6›’ÍFå÷ÇtíY|VGÑg+âWQ½Ö”ÉK@í“sïµP'¶±óßÄ*8h—þæYî7¶ãœ#ñ¸‰Uy¹¬(›gvÏÈê'îa–÷Œ³ô˜¨Óx¥Þ>V/âÓ€c$[·úøOYᱜ™'¹/šÑç\ïÆ‚þ@¼)c–š²þžaÛú{ 3¿ÙßJ 7FHŸ" "Ð_:.ÜÎac× CǼ& ·N6fb\¼|Áܰ쥧ÊÏ…åóçXQ}v˜@·„#O¼iV´¡Îf„Küc•eY¹X²„¶ÝrË-c®Dêä-œyRð@Ëofú°‡^¶áM'3:1Hâmz·…Ú€ço1Þ§É5㊌ÿŠ„Øí•Á23Ÿ;ˆe0VácB!M°²„¥”ñÂWê!…· m ^¨Ó÷)ƒ=òª4jÍ7ô/ÇÏÌB¢ 3,1Ð@Ä È\ôíí¯µ+kàjëì3+TŠuäû8õÔS«ç‚ƒxñ¦ñ˰éÛepÃ.O¸aˆ„#` V~[áŠ÷ =r á½…Qy#|²Ü$¼ý'GFjeÚÁy…X  Ã+‹ÐŸÌ2´O% òä)Sâºtð–'ÜP¸Ùcc Ê9ˆÑǰEÄ!Oá3þz÷ƒ°3+×µó±ŠPzIeòJq@à\ÁÒ)³m6"ÖqŸaPÎ5†gâ×—í3kàÌ`Ýån½õÖ˜öX– àÛPO¸¡¬J_W;¾ 7¶Žýd…%y‘%¯-þœ´2Eõ±¯¬<ã¼åZK“Iç•÷ËM`Éë'Â5}x±ßÖ¾ãÆùáÅ(«7nØÆ NäRB”*#Ü ØðÒ ï^„˜yÌYë}–9ÎôÜóÇ‘Åá OH 
ï%'®× Z/" ƒ@×…›ÿ÷á Ê…/Ï5tô„ðÔÅÓ©AÔJ\ØÛ¦¼CÃM»}Ò!yEJ/÷â 5+Ú°m³Â Uƒl›„P޼[báþcºjº<fMiËÛÃó*o_ñ8aeŒvÑ“EáÉC«…ã¤Ä#f»ŠÐ€×„u²–g<3h(êÞöâùÄ7o!ñ˜ò9vòêÎ[Þ¬pC}\OäN²7ý¶< 8N®Þµžõ6-uQžÜæ½ÑÆÓƒÁ§yõØ~Lá™ð’š…MÔnØŽ~@â¼öƒdÖñfO‚¼ðš]wݵ:EzVrÝFÚW R‰c¸ï¾û¢W†Åhƒ9¦NǦOŸ^ s‰ *ÿ5{lˆ(tPÍà Þ ¬PóVK=`µ"„cð,Ù–\Nœ#~ð+yå?ú/“tÀȹsYepfy/ü6|Ç; Ï?8!8ñ™š…Ù4:èô " !`xøc# 01ï1cû¯wþsÞ!âbYç? x½ày?î)Ü[ÈAƒÐ‡áý“u Ä••ÿ|¢á4tÆÊð‰ †¢y\Å-Í#W¶øŸ…Æe%_¶œUô!ì}Ø’í–~H=¿XÇß#Dàk¯½ÖŠV?ëÕË9wôÑ_ªLeÿrî¦ê†•/´“¶”õÒòÛòÝ–<á†2äŒâÜâ>ï{+žwôMúwÇêÅ‘däÞO9÷ÌóÔ<Ñü,|x÷ËÎ̼¹ç²ˆP•ŠYxVÑ/Ûöe>ÉáÅóçµ}Íß4ÚËýÜ/I¸/cn<}ÁM ëÂÍkÿûÊ¢ÿÝ5¿õ£óÊ 7ä±àAü¶µÿ; :¬- #DjdÅë*õ¼j¤òV…›FöÕHY¾ÌŸ| Šl°ÄÛm 40fê°|0ìCe;CᆾºòÊÚûS#{#¡2aL 2èÛ4|¨‘º)‹pÃ~dáÁ„eç]#õä•e°³a%‰c9&ÂF Û0åþÁÛø,´¢í³Ö1¸2¾x¬µã8›96ÚsÄ_Äx4›{ÿà’ NYÇmË}ðlAì!ôÎr®Øzÿɹ€÷Yþ„y:ÙˆyáÆ<·xËê£RoºFêÏ*Ë@•$­ Ô\g1÷Óc§aVYu–YÆyG”û…?Þ€ô=ÇI›8nŽÁO–z/FÊìÛ—A G°(ºö(Cþ0®WÚ‰pLrkΙ<+S/É€IÀÌy˹‚`Š0ÒŠ™ÀR$ÜXýœ[ü %TözµX=Í|zᆤø÷z„Cîœi°föÃ6<›Áa¡T”*[¯_ á#Ÿ«!¯¼À¸ï"ܘ¾ƒ—L»1I‰ñ¶ ÙÚ•¸Q¯9xµÆìQL¿Mh,Ÿ€ 7„\ê†rÔLrùü½´¾f 7äÒš6mZ<`<íÞúÖ·ÆïÊqÓú9 D@D WH¸é•žèb;Ê 7<˜c>{ñØðøút±…Å»jD¸áaŸxt Zuí.nUíZ/f!nÏì›ÁD‘Ëzm úÕ-–߆œ irånµa°î‡¼$›ÅÂO‡>X·Ž‹œ$<…9‰¾Û²Ã1¾ ûe`J"^ójdYjiŽ’t½~÷n|kã8w{ÉŠpC¢eDÅÔ$ܤDô[D@. 
7·ïšnyYá†X¸T/yÝ4"Ü4 Iöó¶áM¹Â¤:Óå_ûÚ×âl,3gÎÌLªÚ™½öo­LάbYƒãvÙwß}còà æ¯²ÔŽú‹ê — ¡w\»x6"âÌ™3'æa@¦i–õ>ò¼áÁä /%f@ë%#ñ<³Ûá4cÆŒ^jZM[È­DrêÔ®¹æš˜¨;]®ß" " €„›×g-·¸áƼnØéÝ£þ-,°iËûoµ 7­Ôö0цïò¶‚LD@D@D@D@D  H¸éÅ^ép›nh ³y0› Ö ž7nbWè¿xц)\»F×B³µ©ˆ€ˆ€ˆ€ˆ€ˆ@pÓ‡Þ¨p"/ÞÌ1)<¼lݰx\eÊÑQkµmªð²]!á¦,)•ó˜=j„ qJa>1‰6ž¾‹€ˆ€ˆ€ˆ€ˆ€ô" 7½Ø+nS3 M"lŠÙ4ÊL‡ÚáCPõ"ÐrÚ,ZɈ[¨E@D@D@D@D@º@@ÂM ÷Ú.šnì8p&Mšð`5jT`êI™ô2fØav¯çŸ>Ìž=[‚M/w–Ú&" " " " "PC@ÂM ŽþøÑªpÓ”t”" " " " " " "ðêpóê÷A×[ á¦ëȵChŠ€„›¦° ì$Ü ìþSëE@D@D@D@D@D@ú‡€„›þéëê‘J¸©¢Ðèinzº{:Ó8 7áªZE@D@D@D@D@D@D Ý$Ü´›è¨OÂÍè$5QD@D@D@D@D@D@*$Üôái á¦;]‡," " " " " "0 H¸ÝÖZ£%Ü´ÆO[‹€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ@·H¸ééÚ„›ê 5ED@D@D@D@D@D@ H¸)€3XWI¸¬=«ãl$Ü ¶-q<nJ@Rènz ºÝ 7Ý&®ý‰€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ@s$Ü4Çm@o%áf@wŸ/" " " " " "ÐG$ÜôQgÛ¡J¸1úÞ& ᦷû§#­“pÓ¬ªTD@D@D@D@D@D@ÚN@ÂMÛ‘ö~…nz¿ÔB€€„›><$Üôa§ëE@D@D@D@D@D@$ 7²ÛZk´„›Öøikè 7Ý"ÝCûéáæo|c8à€¬Y³ÂÙgŸÝ¡C‡†OúÓaÊ”)aÆŒáž{îihû¬ÂÛo¿}ØqÇãªsÏ=7,_¾<«˜–5I`uÖ ‡~xX±bEøÎw¾ž}öÙ&kÒf" " " " " "Ð_º&ܬ\¹2’·å… Ÿß‡Ž^#¼tßÕñû!Cú‹ü«x´7£G'žxb þð‡?„ŸÿüçmiùÅ_FëB(¹îºëJ×ûþ÷¿?Š>l0oÞ¼pà–Þ6¯à¾ð…ªpC}Ô+kΡÍ7ßa½0zê®aÜVï CGŽÉĽbÉ‚ðÒ=3ÃÂoË^x"ض™…µ°e½ ÜD=á¦hÛf×I¸i–\ëÛI¸i¡j¼:&Ü€lظµÃØÍÞVãKœwÇ%aþ¿ Ë_z¦TyjŽ@§…BbFŒî¿ÿþ°téÒ†ÙNᯒßwß}áÅ_ÌmK;„<&Ožÿ=øàƒaöìÙ¹ûKWŒ?>l¸á†a̘1ÑÃèÉ'ŸŒfZ®è·…"=ðÀ1 °/;lذ˜c†O’9“$¸¬Ñ¶-·Ü2PïsÏ=·Êfk®¹fØd“MÂßÿþ÷¦’K¸Y©ˆ€ˆ€ˆ€ˆ€ˆ€ˆ@•@Ç„›!ÃG†Q“·¯ÙõóaÈðÕª;,ú²rÙâðÜ ß ‹»-¬\¶¤¨¨Öµ@ ]ÂÍ)§œìÌÀôÑ~4ð¡Ä'š~úé§Ã׿þõðÔSOÕ´˜\9[o½u'>ðÄugœqFXýõkÊùÌ$õÄODqã /Œ«þøÇ?†o}ë[¾X˜4iRøêW¿Ö]wÝš¶¦ƒø@{ÒY£ZnCŽ:ê¨ÀÌTþØÙâ ù]òDœ·½ímácûXXc5j޶’ úÛßþvÍr~ì·ß~aß}÷Ë9昰Ï>ûDa…v`x¬‘3è»ßýn@tùÜç>¶Új«¸ÎÖ?óÌ3±ÍiæÿøÇaµÕV‹œðƒBÕÆWÝöùçŸ'tR@˜"m·$ÓB¨#õOúÓê6|Ùc=ªylN;í´pË-·Äß,ϳŸýìgá’K.É[­å" " " " " "Ð:&Ü ¿nÅÓfÿ0f“oä‚¿_æÝqiXöâ“ m§Âå ´K¸!qðFm…„ò˜d"Â΢E‹ª«}²Ú½÷Þ;.G¤Xo½õªeÒ/Gydxì±ÇÂꫯ§g=B€âÉÑG]# Ø:ûœ3gNøÌg>.\h‹B+Âq¨V–|AÀùÆ7¾þüç?׬aJô¯|å+…m½õÖ[é§žZ³Ý‡?üá°×^{Åe<òH ¹o–ýæ7¿ o~ó›W…¬,¢Í!‡/^l‹Â¥—^½¥ðN;vl01¨Z ò…þD8;è ƒüâšï?úÑÂ/ùËê2ŸP¡ ÁíãÿxØ}÷Ý«eÒ/—_~y¸è¢‹ÒÅú-" " " " " }E cÂ͈µ7 ÿí §C°™û»ÓÂÒgþÖÈf*Ûv 7¶k¼j®¼òÊ–„‡ÇôéÓ£Àú›o¾9œ~úéV4z¡Xh 7?Ìbu`e:nBo°³Ï>;zðo¼IŠ„›óÎ;/¬µÖZûÛßâ4ã„íºë®açw®z‡àYâ§6oV¸A$zÓ›Þ÷‡rÓM7EO™×¿þõq&DቲÿþûÇrü‡à… c –ÿûß°]vÙ%ì´ÓNaøðá±üUW]~øÃV·õ 
ñ‚Á;åá‡ïz×»âöÕ•/TxÀ"ERè~ðƒU±è¬³Î 7Þxcµ¸ 7¶€uLÕŽ÷ý‚˜ã aéÚk¯ #GŽŒ^@ö /T=lø%ÜÐxhÁ)Þ1ÎŽC¤{饗âwý'" " " " " ýJ cÂÍjënÖú¯“Be„ØÛÊÀ|ÎÕ_‹Ÿ¼»±íTº4N7„1Ó“Ï2uêÔèiBØE ³,/ÇMžpCÈ3T®„ˆAH”7¼O¾øÅ/ÆEˆ@_þò—««›nÈgsæ™gÆ:ðªÁ‹‡P.o¬§vî¹çF„ïÞK'+Ü QëøãÇ‚XEh”±õ ¢Ì'>ñ‰°`ÁªÆ1r¬Þ1‡zhMnšiÓ¦Åe¬Gh²cà·n.»ì²ð“Ÿü„ÅÑ™š1cFUôùío…5[Ï'ëéÚLŸX–pWTþSŽ#¡OX•@g…›=N^u%–̹êh 7%85[¤Â…¿¤m"d†êqÒ á&ÝwÖoËáB˜$fÍ7x‰pÀ± BzíI ¢Ã;,Šä¬ùÝï~‹\|ñÅÑûgaEp9 â™”e‡~xÌ#Ã:D¨¿üå/±˜nð†Aò¶ãŽ;ÆÐ/–áÉCo£G®† Íš5«Z–2&Ü ¸Xî!¿íù矽¢XF¨TšðùØc oxÃâ&´ßrI¸ñõ]D@D@D@D@D@Êè˜p3rÍÃÄ?: »fùÖTJ.Ÿÿl˜{ýÉaÉÓmh;.O Â͇>ô¡š\)ÖË[ƒGŠ%Ôe]7„B¯˜©i„ ñ‰z-.!8äÝ1kF¸AÙf›mbäŠ!wNÃ3á#œ‹p«,ó̯~õ«èÍB9/ÜnE¸’·Í7ß<œxâ‰qQfeå­Â3顇 ŸÿüçmqU¸I=¤¬@^Úz/6qÄáñÇ«$Ü!}Š€ˆ€ˆ€ˆ€ˆ€ˆ@c:&Ü _cr¿ýô0z£jчþ^¼í°ìùÇÚN…Ëh·p“çA‹l¦¨n 7LÉÍ,J$þev¤m93IÑFŒü0ßûÞ÷lUÍ'žJx,aLµMæ…ï‰WVþóÂ|l=Ÿõ„fûä'?é7‰ß%ܬ‚D D@D@D@D@D@D £:&Ü 1:ŒÞðmá5ïølCðÜg„…ßV.ý׌? U Âu VáO’û©µ•È󲬒xñ’%Õuín,l¨H¸ÊꌲÂ͘1câìMÔ!á&‹¤–‰€ˆ€ˆ€ˆ€ˆ€ˆÀà'Ð1átÃǯÆmõÞ0vË=J‘œßUá¥{®¨L^›àµÔÆ*TšÀ`nƒ:øàƒ#f5b î¿þµ6äÎrî´C¸a*ïí¶Û.îÏçs©×í •’ÇM=ÚZ/" " " " " ›@G…›!•°•_ÆlöaìÿYHjþýׄ\–ÎV®XQXV+[#0„ƒð6ñ–7«ÔI'6Ûl³X”/·ß~»ß,x–v7{î¹g5O³/1 Sj„;‘Ä—.¦û¶6Y’äV“6áæê«¯„ ÉD@D@D@D@D@D@^&ÐQá&îbHE¼©ä»9iË0jƒ+Ÿ[„¡#ÇÆU+–ÌKfß=ò¿ñs)ymVJ´éôÉÙë ù_'Â~ñ‹_Tù—<áæä“O›nºi,vÖYg…o¼Ñ6‰Ÿ>q;„8š….Åtà=V››‰Ù¶6Ø`ƒXæûßÿ~¸æškâw?8³M‘ È[ÙéÀƒpãCÇÒdÉž‰¾‹€ˆ€ˆ€ˆ€ˆ€ˆ@?è¼pó Õ¡«­†™†TD›¡£ÆÇ¥+½VVÄ›å æ†‹çõ#ÿWå˜{]¸ÙÿýÃ>ûìÙ,^¼8üú׿wß}w¸ë®»ÂŠŠ7Vžpóž÷¼§:Å÷¢E‹âôÜx¹¬—0ª©S§Vy·C¸¡2/ÑÖ›nº)Üpà aã7oûÛã'åXÇÌ[fm´Q`F¨aÆÅE÷ÜsOÜö™gž »ì²KÜvøðáq]:3Ô`KN<þøã™Õì´ÓN1¡²Ÿ ‹‚ðÂó„¯Ô¼GÒ1Çî½÷Þš"›l²IUì¹âŠ+Â\P³ž6øƒ>Ûn.½ôÒ0bĈ/.§væ™g†É“'Ç™ºöÛo¿tu8ôÐCôiÓâòÃ;,Ìž=;~÷»ßsýðã´ÓN[EÄZýõã¶„º™%á&¢Ó" " " " " }N@ÂMž&†08ïuCˆ;vlXR™Ê»‘°ÿ„Há]3kÖ¬†¶m– ¡?O<ñDxôÑGKW…pCø¢ Û"ô üô›qüãÇâ ¢—LD@D@D@D@D@ú€„›><’pӇݣC* 7UýóEÂMÿôµŽTD@D@D@D@D@D``p3°û¯©ÖK¸i ›6®pÓuä¯þ%ܼú} ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ@nÊPde$Ü ²Õሀˆ€ˆ€ˆ€ˆ€ˆ€ Znm׿˜„›|6Z#" " " " " " ½D@ÂM/õF—Ú"á¦K µh‘€„›ÄÍ%Ü Ä^S›E@D@D@D@D@D@ú‘€„›>ìu 7}Øé:dI@ÂÍ€ì¶Ö-á¦5~ÚZD@D@D@D@D@D@ºE@ÂM·H÷Ð~$ÜôPg¨)" " " " " " "P@@ÂMœÁºJÂÍ`íY—ˆ€ˆ€ˆ€ˆ€ˆ€ˆÀ`# áf°õh‰ã‘pS’Šˆ€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ@pÓ &ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ@ 7YT´LD@D@D@D@D@D@D@z€€„›è5AD@D@D@D@D@D@D@²H¸É¢¢e" " " " " " " "Ð$Üô@'¨ " " " " " " " "E@ÂM- 
á¦:AM,n²¨h™ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€ˆ€ôÿKklÕÑV¨IEND®B`‚python-semantic-release-10.4.1/docs/configuration/configuration-guides/monorepos.rst000066400000000000000000000321611506116242600310420ustar00rootroot00000000000000.. _monorepos: Releasing Packages from a Monorepo ================================== A monorepo (mono-repository) is a software development strategy where code for multiple projects is stored in a single source control system. This approach streamlines and consolidates configuration, but introduces complexities when using automated tools like Python Semantic Release (PSR). Previously, PSR offered limited compatibility with monorepos. As of v10.4.0, PSR introduces the :ref:`commit_parser-builtin-conventional-monorepo`, designed specifically for monorepo environments. To fully leverage this new parser, you must configure your monorepo as described below. .. _monorepos-config: Configuring PSR --------------- .. _monorepos-config-example_simple: Example: Simple """"""""""""""" **Directory Structure**: PSR does not yet support a single, workspace-level configuration definition. This means each package in the monorepo requires its own PSR configuration file. A compatible and common monorepo file structure looks like: .. 
code:: project/ ├── .git/ ├── .venv/ ├── packages/ │ ├── pkg1/ │ │ ├── docs/ │ │ │ └── source/ │ │ │ ├── conf.py │ │ │ └── index.rst │ │ │ │ │ ├── src/ │ │ │ └── pkg1/ │ │ │ ├── __init__.py │ │ │ ├── __main__.py │ │ │ └── py.typed │ │ │ │ │ ├── CHANGELOG.md │ │ ├── README.md │ │ └── pyproject.toml <-- PSR Configuration for Package 1 │ │ │ └── pkg2/ │ ├── docs/ │ │ └── source/ │ │ ├── conf.py │ │ └── index.rst │ ├── src/ │ │ └── pkg2/ │ │ ├── __init__.py │ │ ├── __main__.py │ │ └── py.typed │ │ │ ├── CHANGELOG.md │ ├── README.md │ └── pyproject.toml <-- PSR Configuration for Package 2 │ ├── .gitignore └── README.md This is the most basic monorepo structure, where each package is self-contained with its own configuration files, documentation, and CHANGELOG files. To release a package, change your current working directory to the package directory and execute PSR's :ref:`cmd-version`. PSR will automatically read the package's ``pyproject.toml``, looking for the ``[tool.semantic_release]`` section to determine the package's versioning and release configuration, then search up the file tree to find the Git repository. Because there is no workspace-level configuration, you must duplicate any common PSR configuration in each package's configuration file. Customize each configuration for each package to specify how PSR should distinguish between commits. With the example file structure above, here is an example configuration file for each package: .. code-block:: toml # FILE: pkg1/pyproject.toml [project] name = "pkg1" version = "1.0.0" [tool.semantic_release] commit_parser = "conventional-monorepo" commit_message = """\ chore(release): pkg1@{version}` Automatically generated by python-semantic-release """ tag_format = "pkg1-v{version}" version_toml = ["pyproject.toml:project.version"] [tool.semantic_release.commit_parser_options] path_filters = ["."] scope_prefix = "pkg1-" .. 
code-block:: toml # FILE: pkg2/pyproject.toml [project] name = "pkg2" version = "1.0.0" [tool.semantic_release] commit_parser = "conventional-monorepo" commit_message = """\ chore(release): pkg2@{version}` Automatically generated by python-semantic-release """ tag_format = "pkg2-v{version}" version_toml = ["pyproject.toml:project.version"] [tool.semantic_release.commit_parser_options] path_filters = ["."] scope_prefix = "pkg2-" These are the minimum configuration options required for each package. Note the use of :ref:`config-tag_format` to distinguish tags between packages. The commit parser options are specific to the new :ref:`commit_parser-builtin-conventional-monorepo` and play a significant role in identifying which commits are relevant to each package. Since you are expected to change directories to each package before releasing, file paths in each configuration file should be relative to the package directory. Each package also defines a slightly different :ref:`config-commit_message` to reflect the package name in each message. This helps clarify which release number is being updated in the commit history. Release Steps ''''''''''''' Given the following Git history of a monorepo using a GitHub Flow branching strategy (without CI/CD): .. image:: ./monorepos-ex-easy-before-release.png To manually release both packages, run: .. code-block:: bash cd packages/pkg1 semantic-release version # 1.0.1 (tag: pkg1-v1.0.1) cd ../pkg2 semantic-release version # 1.1.0 (tag: pkg2-v1.1.0) After releasing both packages, the resulting Git history will look like: .. image:: ./monorepos-ex-easy-post-release.png .. seealso:: - :ref:`GitHub Actions with Monorepos ` Considerations '''''''''''''' 1. **Custom Changelogs**: Managing changelogs can be tricky depending on where you want to write the changelog files. 
In this simple example, the changelog is located within each package directory, and the changelog template does not have any package-specific formatting or naming convention. You can use one shared template directory at the root of the project and configure each package to point to the shared template directory. .. code-block:: toml # FILE: pkg1/pyproject.toml [tool.semantic_release] template_dir = "../../config/release-templates" .. code-block:: toml # FILE: pkg2/pyproject.toml [tool.semantic_release] template_dir = "../../config/release-templates" .. code:: project/ ├── .git/ ├── config/ │ └── release-templates/ │ ├── CHANGELOG.md.j2 │ └── .release_notes.md.j2 ├── packages/ │ ├── pkg1/ │ │ ├── CHANGELOG.md │ │ └── pyproject.toml │ │ │ └── pkg2/ │ ├── CHANGELOG.md │ └── pyproject.toml │ ├── .gitignore └── README.md .. seealso:: - For situations with more complex documentation needs, see our :ref:`Advanced Example `. 2. **Package Prereleases**: Creating pre-releases is possible, but it is recommended to use package-prefixed branch names to avoid collisions between packages. For example, to enable alpha pre-releases for new features in both packages, use the following configuration: .. code-block:: toml # FILE: pkg1/pyproject.toml [tool.semantic_release.branches.alpha-release] match = "^pkg1/feat/.+" # <-- note pkg1 prefix prerelease = true prerelease_token = "alpha" .. code-block:: toml # FILE: pkg2/pyproject.toml [tool.semantic_release.branches.alpha-release] match = "^pkg2/feat/.+" # <-- note pkg2 prefix prerelease = true prerelease_token = "alpha" ---- .. _monorepos-config-example_advanced: Example: Advanced """"""""""""""""" If you want to consolidate documentation into a single top-level directory, the setup becomes more complex. In this example, there is a common documentation folder at the top level, and each package has its own subfolder within the documentation folder. 
Due to naming conventions, PSR cannot automatically accomplish this with its default changelog templates. For this scenario, you must copy the internal PSR templates into a custom directory (even if you do not modify them) and add custom scripting to prepare for each release. The directory structure looks like: .. code:: project/ ├── .git/ ├── docs/ │ ├── source/ │ │ ├── pkg1/ │ │ │ ├── changelog.md │ │ │ └── README.md │ │ ├── pkg2/ │ │ │ ├── changelog.md │ │ │ └── README.md │ │ └── index.rst │ │ │ └── templates/ │ ├── .base_changelog_template/ │ │ ├── components/ │ │ │ ├── changelog_header.md.j2 │ │ │ ├── changelog_init.md.j2 │ │ │ ├── changelog_update.md.j2 │ │ │ ├── changes.md.j2 │ │ │ ├── first_release.md.j2 │ │ │ ├── macros.md.j2 │ │ │ ├── unreleased_changes.md.j2 │ │ │ └── versioned_changes.md.j2 │ │ └── changelog.md.j2 │ ├── .gitignore │ └── .release_notes.md.j2 │ ├── packages/ │ ├── pkg1/ │ │ ├── src/ │ │ │ └── pkg1/ │ │ │ ├── __init__.py │ │ │ └── __main__.py │ │ └── pyproject.toml │ │ │ └── pkg2/ │ ├── src/ │ │ └── pkg2/ │ │ ├── __init__.py │ │ └── __main__.py │ └── pyproject.toml │ └── scripts/ ├── release-pkg1.sh └── release-pkg2.sh Each package should point to the ``docs/templates/`` directory to use a common release notes template. PSR ignores hidden files and directories when searching for template files to create, allowing you to hide shared templates in the directory for use in your release setup script. Here is our configuration file for package 1 (package 2 is similarly defined): .. 
code-block:: toml # FILE: pkg1/pyproject.toml [project] name = "pkg1" version = "1.0.0" [tool.semantic_release] commit_parser = "conventional-monorepo" commit_message = """\ chore(release): Release `pkg1@{version}` Automatically generated by python-semantic-release """ tag_format = "pkg1-v{version}" version_toml = ["pyproject.toml:project.version"] [tool.semantic_release.commit_parser_options] path_filters = [ ".", "../../../docs/source/pkg1/**", ] scope_prefix = "pkg1-" [tool.semantic_release.changelog] template_dir = "../../../docs/templates" mode = "update" exclude_commit_patterns = [ '''^chore(?:\([^)]*?\))?: .+''', '''^ci(?:\([^)]*?\))?: .+''', '''^refactor(?:\([^)]*?\))?: .+''', '''^style(?:\([^)]*?\))?: .+''', '''^test(?:\([^)]*?\))?: .+''', '''^Initial [Cc]ommit''', ] [tool.semantic_release.changelog.default_templates] # To enable update mode: this value must set here because the default is not the # same as the default in the other package & must be the final destination filename # for the changelog relative to this file changelog_file = "../../../docs/source/pkg1/changelog.md" Note: In this configuration, we added path filters for additional documentation files related to the package so that the changelog will include documentation changes as well. Next, define a release script to set up the common changelog templates in the correct directory format so PSR will create the desired files at the proper locations. Following the :ref:`changelog-templates-template-rendering` reference, you must define the folder structure from the root of the project within the templates directory so PSR will properly lay down the files across the repository. The script cleans up any previous templates, dynamically creates the necessary directories, and copies over the shared templates into a package-named directory. Now you are prepared to run PSR for a release of ``pkg1``. .. 
code-block:: bash #!/bin/bash # FILE: scripts/release-pkg1.sh set -euo pipefail PROJECT_ROOT="$(dirname "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")")" VIRTUAL_ENV="$PROJECT_ROOT/.venv" PACKAGE_NAME="pkg1" cd "$PROJECT_ROOT" || exit 1 # Setup documentation template pushd "docs/templates" >/dev/null || exit 1 rm -rf docs/ mkdir -p "docs/source/" cp -r .base_changelog_template/ "docs/source/$PACKAGE_NAME" popd >/dev/null || exit 1 # Release the package pushd "packages/$PACKAGE_NAME" >/dev/null || exit 1 printf '%s\n' "Releasing $PACKAGE_NAME..." "$VIRTUAL_ENV/bin/semantic-release" -v version --no-push popd >/dev/null || exit 1 That's it! This example demonstrates how to set up a monorepo with shared changelog templates and a consolidated documentation folder for multiple packages. .. seealso:: - Advanced Example Monorepo: `codejedi365/psr-monorepo-poweralpha `_ python-semantic-release-10.4.1/docs/configuration/configuration-guides/uv_integration.rst000066400000000000000000000274521506116242600320650ustar00rootroot00000000000000.. _config-guides-uv_integration: Ultraviolet (``uv``) Integration ================================ .. _uv: https://docs.astral.sh/uv/ `uv`_ is an extremely fast Python package and project manager that provides a modern alternative to `pip `_ and `venv `_. It provides a lot of features that solve the common problems of Python package management but it also introduces a few quirks that need to be taken into account when using Python Semantic Release. .. important:: **Prerequisite:** Make sure you have run through the :ref:`Getting Started Guide ` before proceeding with this guide. Updating the ``uv.lock`` ------------------------ One of the best features of ``uv`` is that it automatically generates a lock file (``uv.lock``) that contains the exact versions of all the dependencies used in your project. 
The lock file is generated when you run the ``uv sync`` command
This will also help with other automated tools like Dependabot or Renovate to keep the version of ``uv`` up to date. .. code-block:: toml [project.optional-dependencies] build = ["uv ~= 0.7.12"] [tool.semantic_release] build_command = """ python -m pip install -e '.[build]' uv lock --upgrade-package "$PACKAGE_NAME" git add uv.lock uv build """ #. **Stamp the code first & then separately run release**: If you prefer to not modify the build command, then you will need to run the ``uv lock --upgrade-package `` command prior to actually creating the release. Essentially, you will run PSR twice: (1) once to update the version in the project's definition file, and (2) a second time to generate the release. The intent of the ``uv lock --upgrade-package `` command is **ONLY** to update the version of your project within the lock file after PSR has updated the version in your project's definition file (e.g., ``pyproject.toml``). When you are running PSR, you have already tested the project as is and you don't want to actually update the dependencies if a new one just became available. .. code-block:: bash # 1. PSR stamps version into files (nothing else) # don't build the changelog (especially in update mode) semantic-release -v version --skip-build --no-commit --no-tag --no-changelog # 2. run UV lock as pyproject.toml is updated with the next version uv lock --upgrade-package # 3. stage the lock file to ensure it is included in the PSR commit git add uv.lock # 4. run PSR fully to create release semantic-release -v version **Advanced Example** Of course, you can mix and match these 2 approaches as needed. If PSR's pipeline was using ``uv``, we would have a mixture of the 2 approaches because we run the build in a separate job from the release. In our case, PSR would also need to carry the lock file as a workflow artifact along the pipeline for the release job to commit it. This advanced workflow would look like this: .. code-block:: text # File: .tool-versions uv 0.7.12 .. 
code-block:: text # File: .python-version 3.11.11 .. code-block:: toml # File: pyproject.toml [project.optional-dependencies] build = ["python-semantic-release ~= 10.0"] test = ["pytest ~= 8.0"] [tool.semantic_release] build_command = """ uv lock --upgrade-package "$PACKAGE_NAME" uv build """ .. code-block:: yaml # File: .github/workflows/release.yml on: push: branches: - main jobs: build: runs-on: ubuntu-latest permissions: contents: read env: dist_artifacts_name: dist dist_artifacts_dir: dist lock_file_artifact: uv.lock steps: - name: Setup | Checkout Repository at workflow sha uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: ${{ github.sha }} fetch-depth: 0 - name: Setup | Force correct release branch on workflow sha run: git checkout -B ${{ github.ref_name }} - name: Setup | Install uv uses: asdf-vm/actions/install@1902764435ca0dd2f3388eea723a4f92a4eb8302 # v4.0.2 - name: Setup | Install Python & Project dependencies run: uv sync --extra build - name: Build | Build next version artifacts id: version env: GH_TOKEN: "none" run: uv run semantic-release -v version --no-changelog --no-commit --no-tag - name: Upload | Distribution Artifacts if: ${{ steps.version.outputs.released == 'true' }} uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: ${{ env.dist_artifacts_name }} path: ${{ format('{0}/**', env.dist_artifacts_dir) }} if-no-files-found: error retention-days: 2 - name: Upload | Lock File Artifact if: ${{ steps.version.outputs.released == 'true' }} uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: ${{ env.lock_file_artifact }} path: ${{ env.lock_file_artifact }} if-no-files-found: error retention-days: 2 outputs: new-release-detected: ${{ steps.version.outputs.released }} new-release-version: ${{ steps.version.outputs.version }} new-release-tag: ${{ steps.version.outputs.tag }} new-release-is-prerelease: ${{ steps.version.outputs.is_prerelease }} 
distribution-artifacts: ${{ env.dist_artifacts_name }} lock-file-artifact: ${{ env.lock_file_artifact }} test-e2e: needs: build runs-on: ubuntu-latest steps: - name: Setup | Checkout Repository uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: ${{ github.sha }} fetch-depth: 1 - name: Setup | Download Distribution Artifacts uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 if: ${{ needs.build.outputs.new-release-detected == 'true' }} id: artifact-download with: name: ${{ needs.build.outputs.distribution-artifacts }} path: ./dist - name: Setup | Install uv uses: asdf-vm/actions/install@1902764435ca0dd2f3388eea723a4f92a4eb8302 # v4.0.2 - name: Setup | Install Python & Project dependencies run: uv sync --extra test - name: Setup | Install distribution artifact if: ${{ steps.artifact-download.outcome == 'success' }} run: | uv pip uninstall my-package uv pip install dist/python_semantic_release-*.whl - name: Test | Run pytest run: uv run pytest -vv tests/e2e release: runs-on: ubuntu-latest needs: - build - test-e2e if: ${{ needs.build.outputs.new-release-detected == 'true' }} concurrency: group: ${{ github.workflow }}-release-${{ github.ref_name }} cancel-in-progress: false permissions: contents: write steps: - name: Setup | Checkout Repository on Release Branch uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: ${{ github.ref_name }} fetch-depth: 0 - name: Setup | Force release branch to be at workflow sha run: git reset --hard ${{ github.sha }} - name: Setup | Install uv uses: asdf-vm/actions/install@1902764435ca0dd2f3388eea723a4f92a4eb8302 # v4.0.2 - name: Setup | Install Python & Project dependencies run: uv sync --extra build - name: Setup | Download Build Artifacts uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 id: artifact-download with: name: ${{ needs.build.outputs.distribution-artifacts }} path: dist - name: Setup | Download Lock 
File Artifact uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: name: ${{ needs.build.outputs.lock-file-artifact }} - name: Setup | Stage Lock File for Version Commit run: git add uv.lock - name: Release | Create Release id: release shell: bash env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | bash .github/workflows/verify_upstream.sh uv run semantic-release -v --strict version --skip-build uv run semantic-release publish outputs: released: ${{ steps.release.outputs.released }} new-release-version: ${{ steps.release.outputs.version }} new-release-tag: ${{ steps.release.outputs.tag }} deploy: name: Deploy runs-on: ubuntu-latest if: ${{ needs.release.outputs.released == 'true' && github.repository == 'python-semantic-release/my-package' }} needs: - build - release environment: name: pypi url: https://pypi.org/project/my-package/ permissions: id-token: write steps: - name: Setup | Download Build Artifacts uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 id: artifact-download with: name: ${{ needs.build.outputs.distribution-artifacts }} path: dist - name: Publish package distributions to PyPI uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc # v1.12.4 with: packages-dir: dist print-hash: true verbose: true python-semantic-release-10.4.1/docs/configuration/configuration.rst000066400000000000000000001352041506116242600255450ustar00rootroot00000000000000.. _config: Configuration ============= Configuration is read from a file which can be specified using the :ref:`\\\\-\\\\-config ` option to :ref:`cmd-main`. Python Semantic Release currently supports a configuration in either TOML or JSON format, and will attempt to auto-detect and parse either format. 
When using a JSON-format configuration file, Python Semantic Release looks for its settings beneath a top-level ``semantic_release`` key; when using a TOML-format configuration file, Python Semantic Release first checks for its configuration under the table ``[tool.semantic_release]`` (in line with the convention for Python tools to require their configuration under the top-level ``tool`` table in their ``pyproject.toml`` file), followed by ``[semantic_release]``, which may be more desirable if using a file other than the default ``pyproject.toml`` for configuration. The examples on this page are given in TOML format, however there is no limitation on using JSON instead. In fact, if you would like to convert any example below to its JSON equivalent, the following commands will do this for you (in Bash): .. code-block:: bash export TEXT="" cat <`, such as ``GH_TOKEN``, in your configuration file, and Python Semantic Release will do the rest, as seen below. .. code-block:: toml [semantic_release.remote.token] env = "GH_TOKEN" Given basic TOML syntax compatibility, this is equivalent to: .. code-block:: toml [semantic_release.remote] token = { env = "GH_TOKEN" } The general format for specifying that some configuration should be sourced from an environment variable is: .. code-block:: toml [semantic_release.variable] env = "ENV_VAR" default_env = "FALLBACK_ENV_VAR" default = "default value" In this structure: * ``env`` represents the environment variable that Python Semantic Release will search for * ``default_env`` is a fallback environment variable to read in case the variable specified by ``env`` is not set. This is optional - if not specified then no fallback will be used. * ``default`` is a default value to use in case the environment variable specified by ``env`` is not set. This is optional - if ``default`` is not specified then the environment variable specified by ``env`` is considered required. .. 
_config-root: ``semantic_release`` settings ----------------------------- The following sections outline all the definitions and descriptions of each supported configuration setting. If there are type mis-matches, PSR will throw validation errors upon load. If a setting is not provided, than PSR will fill in the value with the default value. Python Semantic Release expects a root level key to start the configuration definition. Make sure to use the correct root key depending on the configuration format you are using. .. note:: If you are using ``pyproject.toml``, this heading should include the ``tool`` prefix as specified within PEP 517, resulting in ``[tool.semantic_release]``. .. note:: If you are using a ``releaserc.toml``, use ``[semantic_release]`` as the root key .. note:: If you are using a ``releaserc.json``, ``semantic_release`` must be the root key in the top level dictionary. ---- .. _config-allow_zero_version: ``allow_zero_version`` """""""""""""""""""""" *Introduced in v9.2.0* **Type:** ``bool`` This flag controls whether or not Python Semantic Release will use version numbers aligning with the ``0.x.x`` pattern. If set to ``true`` and starting at ``0.0.0``, a minor bump would set the next version as ``0.1.0`` whereas a patch bump would set the next version as ``0.0.1``. A breaking change (ie. major bump) would set the next version as ``1.0.0`` unless the :ref:`config-major_on_zero` is set to ``false``. If set to ``false``, Python Semantic Release will consider the first possible version to be ``1.0.0``, regardless of patch, minor, or major change level. Additionally, when ``allow_zero_version`` is set to ``false``, the :ref:`config-major_on_zero` setting is ignored. *Default changed to ``false`` in v10.0.0* **Default:** ``false`` ---- .. _config-assets: ``assets`` """""""""" **Type:** ``list[str]`` One or more paths to additional assets that should committed to the remote repository in addition to any files modified by writing the new version. 
**Default:** ``[]`` ---- .. _config-branches: ``branches`` """""""""""" This setting is discussed in more detail at :ref:`multibranch-releases` **Default:** .. code-block:: toml [semantic_release.branches.main] match = "(main|master)" prerelease_token = "rc" prerelease = false ---- .. _config-build_command: ``build_command`` """"""""""""""""" **Type:** ``Optional[str]`` Command to use to build the current project during :ref:`cmd-version`. Python Semantic Release will execute the build command in the OS default shell with a subset of environment variables. PSR provides the variable ``NEW_VERSION`` in the environment with the value of the next determined version. The following table summarizes all the environment variables that are passed on to the ``build_command`` runtime if they exist in the parent process. If you would like to pass additional environment variables to your build command, see :ref:`config-build_command_env`. ======================== ====================================================================== Variable Name Description ======================== ====================================================================== CI Pass-through ``true`` if exists in process env, unset otherwise BITBUCKET_CI ``true`` if Bitbucket CI variables exist in env, unset otherwise GITHUB_ACTIONS Pass-through ``true`` if exists in process env, unset otherwise GITEA_ACTIONS Pass-through ``true`` if exists in process env, unset otherwise GITLAB_CI Pass-through ``true`` if exists in process env, unset otherwise HOME Pass-through ``HOME`` of parent process NEW_VERSION Semantically determined next version (ex. 
``1.2.3``) PATH Pass-through ``PATH`` of parent process PACKAGE_NAME Project name as defined in ``pyproject.toml:project.name`` PSR_DOCKER_GITHUB_ACTION Pass-through ``true`` if exists in process env, unset otherwise VIRTUAL_ENV Pass-through ``VIRTUAL_ENV`` if exists in process env, unset otherwise ======================== ====================================================================== In addition, on windows systems these environment variables are passed: ======================== ====================================================================== Variable Name Description ======================== ====================================================================== ALLUSERSAPPDATA Pass-through ``ALLUSERAPPDATA`` if exists in process env, unset otherwise ALLUSERSPROFILE Pass-through ``ALLUSERSPPPROFILE`` if exists in process env, unset otherwise APPDATA Pass-through ``APPDATA`` if exists in process env, unset otherwise COMMONPROGRAMFILES Pass-through ``COMMONPROGRAMFILES`` if exists in process env, unset otherwise COMMONPROGRAMFILES(X86) Pass-through ``COMMONPROGRAMFILES(X86)`` if exists in process env, unset otherwise DEFAULTUSERPROFILE Pass-through ``DEFAULTUSERPROFILE`` if exists in process env, unset otherwise HOMEPATH Pass-through ``HOMEPATH`` if exists in process env, unset otherwise PATHEXT Pass-through ``PATHEXT`` if exists in process env, unset otherwise PROFILESFOLDER Pass-through ``PROFILESFOLDER`` if exists in process env, unset otherwise PROGRAMFILES Pass-through ``PROGRAMFILES`` if exists in process env, unset otherwise PROGRAMFILES(X86) Pass-through ``PROGRAMFILES(X86)`` if exists in process env, unset otherwise SYSTEM Pass-through ``SYSTEM`` if exists in process env, unset otherwise SYSTEM16 Pass-through ``SYSTEM16`` if exists in process env, unset otherwise SYSTEM32 Pass-through ``SYSTEM32`` if exists in process env, unset otherwise SYSTEMDRIVE Pass-through ``SYSTEMDRIVE`` if exists in process env, unset otherwise SYSTEMROOT Pass-through 
``SYSTEMROOT`` if exists in process env, unset otherwise TEMP Pass-through ``TEMP`` if exists in process env, unset otherwise TMP Pass-through ``TMP`` if exists in process env, unset otherwise USERPROFILE Pass-through ``USERPROFILE`` if exists in process env, unset otherwise USERSID Pass-through ``USERSID`` if exists in process env, unset otherwise WINDIR Pass-through ``WINDIR`` if exists in process env, unset otherwise ======================== ====================================================================== **Default:** ``None`` (not specified) ---- .. _config-build_command_env: ``build_command_env`` """"""""""""""""""""" *Introduced in v9.7.2* **Type:** ``Optional[list[str]]`` List of environment variables to include or pass-through on to the build command that executes during :ref:`cmd-version`. This configuration option allows the user to extend the list of environment variables from the table above in :ref:`config-build_command`. The input is a list of strings where each individual string handles a single variable definition. There are two formats accepted and are detailed in the following table: ================== =================================================================== FORMAT Description ================== =================================================================== ``VAR_NAME`` Detects value from the PSR process environment, and passes value to ``build_command`` process ``VAR_NAME=value`` Sets variable name to value inside of ``build_command`` process ================== =================================================================== .. note:: Although variable name capitalization is not required, it is recommended as to be in-line with the POSIX-compliant recommendation for shell variable names. **Default:** ``None`` (not specified) ---- .. _config-changelog: ``changelog`` """"""""""""" This section outlines the configuration options available that modify changelog generation. .. 
note:: **pyproject.toml:** ``[tool.semantic_release.changelog]`` **releaserc.toml:** ``[semantic_release.changelog]`` **releaserc.json:** ``{ "semantic_release": { "changelog": {} } }`` ---- .. _config-changelog-changelog_file: ``changelog_file`` ****************** .. warning:: *Deprecated in v9.11.0.* This setting has been moved to :ref:`changelog.default_templates.changelog_file ` for a more logical grouping. This setting will be removed in a future major release. **Type:** ``str`` Specify the name of the changelog file that will be created. This file will be created or overwritten (if it previously exists) with the rendered default template included with Python Semantic Release. Depending on the file extension of this setting, the changelog will be rendered in the format designated by the extension. PSR, as of v9.11.0, provides a default changelog template in both Markdown (``.md``) and reStructuredText (``.rst``) formats. If the file extension is not recognized, the changelog will be rendered in Markdown format, unless the :ref:`config-changelog-default_templates-output_format` setting is set. If you are using the ``template_dir`` setting for providing customized templates, this setting is not used. See :ref:`config-changelog-template_dir` for more information. **Default:** ``"CHANGELOG.md"`` ---- .. _config-changelog-default_templates: ``default_templates`` ********************* .. note:: This section of the configuration contains options which customize or modify the default changelog templates included with PSR. **pyproject.toml:** ``[tool.semantic_release.changelog.default_templates]`` **releaserc.toml:** ``[semantic_release.changelog.default_templates]`` **releaserc.json:** ``{ "semantic_release": { "changelog": { "default_templates": {} } } }`` ---- .. _config-changelog-default_templates-changelog_file: ``changelog_file`` '''''''''''''''''' *Introduced in v9.11.0.* **Type:** ``str`` Specify the name of the changelog file that will be created. 
This file will be created or overwritten (if it previously exists) with the rendered default template included with Python Semantic Release. Depending on the file extension of this setting, the changelog will be rendered in the format designated by the extension. PSR, as of v9.11.0, provides a default changelog template in both Markdown (``.md``) and reStructuredText (``.rst``) formats. If the file extension is not recognized, the changelog will be rendered in Markdown format, unless the :ref:`config-changelog-default_templates-output_format` setting is set. If you are using the ``template_dir`` setting for providing customized templates, this setting is not used. See :ref:`config-changelog-template_dir` for more information. **Default:** ``"CHANGELOG.md"`` ---- .. _config-changelog-default_templates-mask_initial_release: ``mask_initial_release`` '''''''''''''''''''''''' *Introduced in v9.14.0* **Type:** ``bool`` This option toggles the behavior of the changelog and release note templates to mask the release details specifically for the first release. When set to ``true``, the first version release notes will be masked with a generic message as opposed to the usual commit details. When set to ``false``, the release notes will be generated as normal. The reason for this setting is to improve clarity to your audience. It conceptually does **NOT** make sense to have a list of changes (i.e. a Changelog) for the first release since nothing has been published yet, therefore in the eyes of your consumers what change is there to document? The message details can be found in the ``first_release.md.j2`` and ``first_release.rst.j2`` templates of the default changelog template directory. *Default changed to ``true`` in v10.0.0.* **Default:** ``true`` .. seealso:: - :ref:`changelog-templates-default_changelog` ---- .. 
_config-changelog-default_templates-output_format: ``output_format`` ''''''''''''''''' *Introduced in v9.10.0* **Type:** ``Literal["md", "rst"]`` This setting is used to specify the output format the default changelog template will use when rendering the changelog. PSR supports both Markdown (``md``) and reStructuredText (``rst``) formats. This setting will take precedence over the file extension of the :ref:`config-changelog-default_templates-changelog_file` setting. If this setting is omitted, the file extension of the :ref:`config-changelog-default_templates-changelog_file` setting will be used to determine the output format. If the file extension is not recognized, the output format will default to Markdown. **Default:** ``"md"`` .. seealso:: - :ref:`config-changelog-default_templates-changelog_file` ---- .. _config-changelog-environment: ``environment`` *************** .. note:: This section of the configuration contains options which customize the template environment used to render templates such as the changelog. Most options are passed directly to the `jinja2.Environment`_ constructor, and further documentation one these parameters can be found there. **pyproject.toml:** ``[tool.semantic_release.changelog.environment]`` **releaserc.toml:** ``[semantic_release.changelog.environment]`` **releaserc.json:** ``{ "semantic_release": { "changelog": { "environment": {} } } }`` .. _`jinja2.Environment`: https://jinja.palletsprojects.com/en/3.1.x/api/#jinja2.Environment ---- .. 
_config-changelog-environment-autoescape: ``autoescape`` '''''''''''''' **Type:** ``Union[str, bool]`` If this setting is a string, it should be given in ``module:attr`` form; Python Semantic Release will attempt to dynamically import this string, which should represent a path to a suitable callable that satisfies the following: As of Jinja 2.4 this can also be a callable that is passed the template name and has to return ``true`` or ``false`` depending on autoescape should be enabled by default. The result of this dynamic import is passed directly to the `jinja2.Environment`_ constructor. If this setting is a boolean, it is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``false`` ---- .. _config-changelog-environment-block_start_string: ``block_start_string`` '''''''''''''''''''''' **Type:** ``str`` This setting is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``"{%"`` ---- .. _config-changelog-environment-block_end_string: ``block_end_string`` '''''''''''''''''''' **Type:** ``str`` This setting is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``"%}"`` ---- .. _config-changelog-environment-comment_start_string: ``comment_start_string`` '''''''''''''''''''''''' **Type:** ``str`` This setting is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``{#`` ---- .. _config-changelog-environment-comment_end_string: ``comment_end_string`` '''''''''''''''''''''' **Type:** ``str`` This setting is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``"#}"`` ---- .. _config-changelog-environment-extensions: ``extensions`` '''''''''''''' **Type:** ``list[str]`` This setting is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``[]`` ---- .. _config-changelog-environment-keep_trailing_newline: ``keep_trailing_newline`` ''''''''''''''''''''''''' **Type:** ``bool`` This setting is passed directly to the `jinja2.Environment`_ constructor. 
**Default:** ``false`` ---- .. _config-changelog-environment-line_comment_prefix: ``line_comment_prefix`` ''''''''''''''''''''''' **Type:** ``Optional[str]`` This setting is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``None`` (not specified) ---- .. _config-changelog-environment-line_statement_prefix: ``line_statement_prefix`` ''''''''''''''''''''''''' **Type:** ``Optional[str]`` This setting is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``None`` (not specified) ---- .. _config-changelog-environment-lstrip_blocks: ``lstrip_blocks`` ''''''''''''''''' **Type:** ``bool`` This setting is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``false`` ---- .. _config-changelog-environment-newline_sequence: ``newline_sequence`` '''''''''''''''''''' **Type:** ``Literal["\n", "\r", "\r\n"]`` This setting is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``"\n"`` ---- .. _config-changelog-environment-trim_blocks: ``trim_blocks`` ''''''''''''''' **Type:** ``bool`` This setting is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``false`` ---- .. _config-changelog-environment-variable_start_string: ``variable_start_string`` ''''''''''''''''''''''''' **Type:** ``str`` This setting is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``"{{"`` ---- .. _config-changelog-environment-variable_end_string: ``variable_end_string`` ''''''''''''''''''''''' **Type:** ``str`` This setting is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``"}}"`` ---- .. _config-changelog-exclude_commit_patterns: ``exclude_commit_patterns`` *************************** **Type:** ``list[str]`` Any patterns specified here will be excluded from the commits which are available to your changelog. This allows, for example, automated commits to be removed if desired. 
Python Semantic Release also removes its own commits from the Changelog via this mechanism; therefore if you change the automated commit message that Python Semantic Release uses when making commits, you may wish to add the *old* commit message pattern here. The patterns in this list are treated as regular expressions. **Default:** ``[]`` ---- .. _config-changelog-mode: ``mode`` ******** *Introduced in v9.10.0. Default changed to `update` in v10.0.0.* **Type:** ``Literal["init", "update"]`` This setting is a flag that is ultimately passed into the changelog context environment. It sets the value of ``context.changelog_mode`` to a string value of either ``init`` or ``update``. When used with the provided changelog template, it will determine the behavior of how the changelog is written. When the mode is set to ``init``, the changelog file will be written from scratch, overwriting any existing changelog file. This is the ``v8`` and ``v9`` default behavior. When the mode is set to ``update``, the changelog file will look for the ``insertion_flag`` value in the changelog file (defined by :ref:`config-changelog-changelog_file`) and insert the new version information at that location. If you are using a custom template directory, the `context.changelog_mode` value will exist in the changelog context but it is up to your implementation to determine if and/or how to use it. **Default:** ``update`` .. seealso:: - :ref:`changelog-templates-default_changelog` ---- .. _config-changelog-insertion_flag: ``insertion_flag`` ****************** *Introduced in v9.10.0* **Type:** ``str`` A string that will be used to identify where the new version should be inserted into the changelog file (as defined by :ref:`config-changelog-changelog_file`) when the changelog mode is set to ``update``. If you modify this value in your config, you will need to manually update any saved changelog file to match the new insertion flag if you use the ``update`` mode. 
In ``init`` mode, the changelog file will be overwritten as normal. In v9.11.0, the ``insertion_flag`` default value became more dynamic with the introduction of an reStructuredText template. The default value will be set depending on the :ref:`config-changelog-default_templates-output_format` setting. The default flag values are: ================== ========================= Output Format Default Insertion Flag ================== ========================= Markdown (``md``) ```` reStructuredText ``..\n version list`` ================== ========================= **Default:** various, see above ---- .. _config-changelog-template_dir: ``template_dir`` **************** **Type:** ``str`` When files exist within the specified directory, they will be used as templates for the changelog rendering process. Regardless if the directory includes a changelog file, the provided directory will be rendered and files placed relative to the root of the project directory. No default changelog template or release notes template will be used when this directory exists and the directory is not empty. If the directory is empty, the default changelog template will be used. This option is discussed in more detail at :ref:`changelog-templates` **Default:** ``"templates"`` ---- .. _config-commit_author: ``commit_author`` """"""""""""""""" **Type:** ``str`` Author used in commits in the format ``name ``. .. note:: If you are using the built-in GitHub Action, the default value is set to ``github-actions ``. You can modify this with the ``git_committer_name`` and ``git_committer_email`` inputs. .. seealso:: - :ref:`gh_actions` **Default:** ``semantic-release `` ---- .. _config-commit_message: ``commit_message`` """""""""""""""""" **Type:** ``str`` Commit message to use when making release commits. The message can use ``{version}`` as a format key, in which case the version being released will be formatted into the message. 
If at some point in your project's lifetime you change this, you may wish to consider adding the old message pattern(s) to :ref:`exclude_commit_patterns <config-changelog-exclude_commit_patterns>`.
_config-major_on_zero: ``major_on_zero`` """"""""""""""""" **Type:** ``bool`` This flag controls whether or not Python Semantic Release will increment the major version upon a breaking change when the version matches ``0.y.z``. This value is set to ``true`` by default, where breaking changes will increment the ``0`` major version to ``1.0.0`` like normally expected. If set to ``false``, major (breaking) releases will increment the minor digit of the version while the major version is ``0``, instead of the major digit. This allows for continued breaking changes to be made while the major version remains ``0``. From the `Semantic Versioning Specification`_: Major version zero (0.y.z) is for initial development. Anything MAY change at any time. The public API SHOULD NOT be considered stable. .. _Semantic Versioning Specification: https://semver.org/spec/v2.0.0.html#spec-item-4 When you are ready to release a stable version, set ``major_on_zero`` to ``true`` and run Python Semantic Release again. This will increment the major version to ``1.0.0``. When :ref:`config-allow_zero_version` is set to ``false``, this setting is ignored. **Default:** ``true`` ---- .. _config-no_git_verify: ``no_git_verify`` """"""""""""""""" *Introduced in v9.8.0* **Type:** ``bool`` This flag is passed along to ``git`` upon performing a ``git commit`` during :ref:`cmd-version`. When true, it will bypass any git hooks that are set for the repository when Python Semantic Release makes a version commit. When false, the commit is performed as normal. This option has no effect when there are not any git hooks configured nor when the ``--no-commit`` option is passed. **Default:** ``false`` ---- .. _config-publish: ``publish`` """"""""""" This section defines configuration options that modify :ref:`cmd-publish`. .. note:: **pyproject.toml:** ``[tool.semantic_release.publish]`` **releaserc.toml:** ``[semantic_release.publish]`` **releaserc.json:** ``{ "semantic_release": { "publish": {} } }`` ---- .. 
_config-publish-dist_glob_patterns: ``dist_glob_patterns`` ********************** **Type:** ``list[str]`` Upload any files matching any of these globs to your VCS release. Each item in this list should be a string containing a Unix-style glob pattern. **Default:** ``["dist/*"]`` ---- .. _config-publish-upload_to_vcs_release: ``upload_to_vcs_release`` ************************* **Type:** ``bool`` If set to ``true``, upload any artifacts matched by the :ref:`dist_glob_patterns ` to the release created in the remote VCS corresponding to the latest tag. Artifacts are only uploaded if release artifact uploads are supported by the :ref:`VCS type `. **Default:** ``true`` ---- .. _config-remote: ``remote`` """""""""" The remote configuration is a group of settings that configure PSR's integration with remote version control systems. .. note:: **pyproject.toml:** ``[tool.semantic_release.remote]`` **releaserc.toml:** ``[semantic_release.remote]`` **releaserc.json:** ``{ "semantic_release": { "remote": {} } }`` ---- .. _config-remote-api_domain: ``api_domain`` ************** **Type:** ``Optional[str | Dict['env', str]]`` The hosting domain for the API of your remote HVCS if different than the ``domain``. Generally, this will be used to specify a separate subdomain that is used for API calls rather than the primary domain (ex. ``api.github.com``). **Most on-premise HVCS installations will NOT use this setting!** Whether or not this value is used depends on the HVCS configured (and your server administration) in the :ref:`remote.type ` setting and used in tandem with the :ref:`remote.domain ` setting. When using a custom :ref:`remote.domain ` and a HVCS :ref:`remote.type ` that is configured with a separate domain or sub-domain for API requests, this value is used to configure the location of API requests that are sent from PSR. Most on-premise or self-hosted HVCS environments will use a path prefix to handle inbound API requests, which means this value will ignored. 
PSR knows the expected API domains for known cloud services, which means this value is not necessary to explicitly define for services such as ``bitbucket.org`` and ``github.com``.
This is ideal, for example, if you already have SSH keys set up which can be used for pushing. **Default:** ``false`` ---- .. _config-remote-insecure: ``insecure`` ************ *Introduced in v9.4.2* **Type:** ``bool`` Insecure is used to allow non-secure ``HTTP`` connections to your HVCS server. If set to ``true``, any domain value passed will assume ``http://`` if it is not specified and allow it. When set to ``false`` (implicitly or explicitly), it will force ``https://`` communications. When a custom ``domain`` or ``api_domain`` is provided as a configuration, this flag governs the protocol scheme used for those connections. If the protocol scheme is not provided in the field value, then this ``insecure`` option defines whether ``HTTP`` or ``HTTPS`` is used for the connection. If the protocol scheme is provided in the field value, it must match this setting or it will throw an error. The purpose of this flag is to prevent any typos in provided ``domain`` and ``api_domain`` values that accidentally specify an insecure connection but allow users to toggle the protection scheme off when desired. **Default:** ``false`` ---- .. _config-remote-name: ``name`` ******** **Type:** ``str`` Name of the remote to push to using ``git push -u $name `` **Default:** ``"origin"`` ---- .. _config-remote-token: ``token`` ********* **Type:** ``Optional[str | Dict['env', str]]`` :ref:`Environment Variable ` from which to source the authentication token for the remote VCS. Common examples include ``"GH_TOKEN"``, ``"GITLAB_TOKEN"`` or ``"GITEA_TOKEN"``, however, you may choose to use a custom environment variable if you wish. .. note:: By default, this is a **mandatory** environment variable that must be set before using any functionality that requires authentication with your remote VCS. If you are using this token to enable push access to the repository, it must also be set before attempting to push. 
If your push access is enabled via SSH keys instead, then you do not need to set this environment variable in order to push the version increment, changelog and modified source code assets to the remote using :ref:`cmd-version`. However, you will need to disable release creation using the :ref:`cmd-version-option-vcs-release` option, among other options, in order to use Python Semantic Release without configuring the environment variable for your remote VCS authentication token. The default value for this setting depends on what you specify as :ref:`remote.type `. Review the table below to see what the default token value will be for each remote type. ================ == =============================== ``remote.type`` Default ``remote.token`` ================ == =============================== ``"github"`` -> ``{ env = "GH_TOKEN" }`` ``"gitlab"`` -> ``{ env = "GITLAB_TOKEN" }`` ``"gitea"`` -> ``{ env = "GITEA_TOKEN" }`` ``"bitbucket"`` -> ``{ env = "BITBUCKET_TOKEN" }`` ================ == =============================== **Default:** ``{ env = "" }``, where ```` depends on :ref:`remote.type ` as indicated above. ---- .. _config-remote-type: ``type`` ******** **Type:** ``Literal["bitbucket", "gitea", "github", "gitlab"]`` The type of the remote VCS. Currently, Python Semantic Release supports ``"github"``, ``"gitlab"``, ``"gitea"`` and ``"bitbucket"``. Not all functionality is available with all remote types, but we welcome pull requests to help improve this! **Default:** ``"github"`` ---- .. _config-remote-url: ``url`` ******* **Type:** ``Optional[str | Dict['env', str]]`` An override setting used to specify the remote upstream location of ``git push``. **Not commonly used!** This is used to override the derived upstream location when the desired push location is different than the location the repository was cloned from. This setting will override the upstream location url that would normally be derived from the :ref:`remote.name ` location of your git repository. 
**Default:** ``None`` ---- .. _config-tag_format: ``tag_format`` """""""""""""" **Type:** ``str`` Specify the format to be used for the Git tag that will be added to the repo during a release invoked via :ref:`cmd-version`. The format string is a regular expression, which also must include the format keys below, otherwise an exception will be thrown. It *may* include any of the optional format keys, in which case the contents described will be formatted into the specified location in the Git tag that is created. For example, ``"(dev|stg|prod)-v{version}"`` is a valid ``tag_format`` matching tags such as: - ``dev-v1.2.3`` - ``stg-v0.1.0-rc.1`` - ``prod-v2.0.0+20230701`` This format will also be used for parsing tags already present in the repository into semantic versions; therefore if the tag format changes at some point in the repository's history, historic versions that no longer match this pattern will not be considered as versions. ================ ========= ========================================================== Format Key Mandatory Contents ================ ========= ========================================================== ``{version}`` Yes The new semantic version number, for example ``1.2.3``, or ``2.1.0-alpha.1+build.1234`` ================ ========= ========================================================== Tags which do not match this format will not be considered as versions of your project. **Default:** ``"v{version}"`` ---- .. _config-version_toml: ``version_toml`` """""""""""""""" **Type:** ``list[str]`` This configuration option is similar to :ref:`config-version_variables`, but it uses a TOML parser to interpret the data structure before, inserting the version. This allows users to use dot-notation to specify the version via the logical structure within the TOML file, which is more accurate than a pattern replace. 
The ``version_toml`` option is commonly used to update the version number in the project definition file: ``pyproject.toml`` as seen in the example below. As of v9.20.0, the ``version_toml`` option accepts a colon-separated definition with either 2 or 3 parts. The 2-part definition includes the file path and the version parameter (in dot-notation). Newly with v9.20.0, it also accepts an optional 3rd part to allow configuration of the format type. **Available Format Types** - ``nf``: Number format (ex. ``1.2.3``) - ``tf``: :ref:`Tag Format ` (ex. ``v1.2.3``) If the format type is not specified, it will default to the number format. **Example** .. code-block:: toml [semantic_release] version_toml = [ # "file:variable:[format_type]" "pyproject.toml:tool.poetry.version", # Implied Default: Number format "definition.toml:project.version:nf", # Number format "definition.toml:project.release:tf", # Tag format ] This configuration will result in the following changes: .. code-block:: diff diff a/pyproject.toml b/pyproject.toml [tool.poetry] - version = "0.1.0" + version = "0.2.0" .. code-block:: diff diff a/definition.toml b/definition.toml [project] name = "example" - version = "0.1.0" + version = "0.2.0" - release = "v0.1.0" + release = "v0.2.0" **Default:** ``[]`` ---- .. _config-version_variables: ``version_variables`` """"""""""""""""""""" **Type:** ``list[str]`` The ``version_variables`` configuration option is a list of string definitions that defines where the version number should be updated in the repository, when a new version is released. As of v9.20.0, the ``version_variables`` option accepts a colon-separated definition with either 2 or 3 parts. The 2-part definition includes the file path and the variable name. Newly with v9.20.0, it also accepts an optional 3rd part to allow configuration of the format type. **Available Format Types** - ``nf``: Number format (ex. ``1.2.3``) - ``tf``: :ref:`Tag Format ` (ex. 
``v1.2.3``) If the format type is not specified, it will default to the number format. Prior to v9.20.0, PSR only supports entries with the first 2-parts as the tag format type was not available and would only replace numeric version numbers. **Example** .. code-block:: toml [semantic_release] tag_format = "v{version}" version_variables = [ # "file:variable:format_type" "src/semantic_release/__init__.py:__version__", # Implied Default: Number format "docs/conf.py:version:nf", # Number format for sphinx docs "kustomization.yml:newTag:tf", # Tag format ] First, the ``__version__`` variable in ``src/semantic_release/__init__.py`` will be updated with the next version using the `SemVer`_ number format. .. code-block:: diff diff a/src/semantic_release/__init__.py b/src/semantic_release/__init__.py - __version__ = "0.1.0" + __version__ = "0.2.0" Then, the ``version`` variable in ``docs/conf.py`` will be updated with the next version using the `SemVer`_ number format because of the explicit ``nf``. .. code-block:: diff diff a/docs/conf.py b/docs/conf.py - version = "0.1.0" + version = "0.2.0" Lastly, the ``newTag`` variable in ``kustomization.yml`` will be updated with the next version using the configured :ref:`config-tag_format` because the definition included ``tf``. .. code-block:: diff diff a/kustomization.yml b/kustomization.yml images: - name: repo/image - newTag: v0.1.0 + newTag: v0.2.0 **How It works** Each version variable will be transformed into a Regular Expression that will be used to substitute the version number in the file. The replacement algorithm is **ONLY** a pattern match and replace. It will **NOT** evaluate the code nor will PSR understand any internal object structures (ie. ``file:object.version`` will not work). The regular expression generated from the ``version_variables`` definition will: 1. Look for the specified ``variable`` name in the ``file``. 
The variable name can be enclosed by single (``'``) or double (``"``) quotation marks but they must match. 2. The variable name defined by ``variable`` and the version must be separated by an operand symbol (``=``, ``:``, ``:=``, or ``@``). Whitespace is optional around the symbol. As of v10.0.0, a double-equals (``==``) operator is also supported as a valid operand symbol. 3. The value of the variable must match a `SemVer`_ regular expression and can be enclosed by single (``'``) or double (``"``) quotation marks but they must match. However, the enclosing quotes of the value do not have to match the quotes surrounding the variable name. 4. If the format type is set to ``tf`` then the variable value must have the matching prefix and suffix of the :ref:`config-tag_format` setting around the `SemVer`_ version number. Given the pattern matching nature of this feature, the Regular Expression is able to support most file formats because of the similarity of variable declaration across programming languages. PSR specifically supports Python, YAML, and JSON as these have been the most commonly requested formats. This configuration option will also work regardless of file extension because it looks for a matching pattern string. .. note:: This will also work for TOML but we recommend using :ref:`config-version_toml` for TOML files as it actually will interpret the TOML file and replace the version number before writing the file back to disk. This is a comprehensive list (but not all variations) of examples where the following versions will be matched and replaced by the new version: .. 
code-block:: # Common variable declaration formats version='1.2.3' version = "1.2.3" release = "v1.2.3" # if tag_format is set # YAML version: 1.2.3 # JSON "version": "1.2.3" # NPM & GitHub Actions YAML version@1.2.3 version@v1.2.3 # if tag_format is set # Walrus Operator version := "1.2.3" # Excessive whitespace version = '1.2.3' # Mixed Quotes "version" = '1.2.3' # Custom Tag Format with tag_format set (monorepos) __release__ = "module-v1.2.3" # requirements.txt my-package == 1.2.3 .. important:: The Regular Expression expects a version value to exist in the file to be replaced. It cannot be an empty string or a non-semver compliant string. If this is the very first time you are using PSR, we recommend you set the version to ``0.0.0``. This may become more flexible in the future with resolution of issue `#941`_. .. _#941: https://github.com/python-semantic-release/python-semantic-release/issues/941 .. warning:: If the file (ex. JSON) you are replacing has two of the same variable name in it, this pattern match will not be able to differentiate between the two and will replace both. This is a limitation of the pattern matching and not a bug. **Default:** ``[]`` .. _SemVer: https://semver.org/ python-semantic-release-10.4.1/docs/configuration/index.rst000066400000000000000000000013311506116242600237760ustar00rootroot00000000000000.. _configuration: Configuration ============= Python Semantic Release is highly configurable, allowing you to tailor it to your project's needs. It supports various runtime environments and can be integrated with different CI/CD services. #. Check out our set of :ref:`configuration guides ` to help walk you through the set up of common project customizations. #. Dive in deep and explore the full set of possible :ref:`customization options `. #. Go Automatic and Configure your :ref:`CI/CD services ` to use Python Semantic Release. .. 
toctree:: :maxdepth: 1 :hidden: Guides Options automatic-releases/index python-semantic-release-10.4.1/docs/contributing/000077500000000000000000000000001506116242600217775ustar00rootroot00000000000000python-semantic-release-10.4.1/docs/contributing/contributing_guide.rst000066400000000000000000000000441506116242600264130ustar00rootroot00000000000000.. include:: ../../CONTRIBUTING.rst python-semantic-release-10.4.1/docs/contributing/index.rst000066400000000000000000000020121506116242600236330ustar00rootroot00000000000000.. _contributing: Contributing ============ Love Python Semantic Release? Want to help out? There are many ways you can contribute to the project! You can help by: - Reporting bugs and issues - Suggesting new features - Improving the documentation - Reviewing pull requests - Contributing code - Helping with translations - Spreading the word about Python Semantic Release - Participating in discussions - Testing new features and providing feedback No matter how you choose to contribute, please check out our :ref:`Contributing Guidelines ` and know we appreciate your help! **Check out all the folks whom already contributed to Python Semantic Release and become one of them today!** |contributors| .. |contributors| image:: https://contributors-img.web.app/image?repo=python-semantic-release/python-semantic-release :target: https://github.com/python-semantic-release/python-semantic-release/graphs/contributors .. toctree:: :hidden: :maxdepth: 1 Contributing Guide python-semantic-release-10.4.1/docs/index.rst000066400000000000000000000061521506116242600211350ustar00rootroot00000000000000Python Semantic Release *********************** |PyPI Version| |conda-forge version| |Last Release| |Monthly Downloads| |PSR License| |Issues| **Python Semantic Release (PSR)** provides an automated release mechanism determined by SemVer and Commit Message Conventions for your Git projects. 
The purpose of this project is to detect what the next version of the project should be from parsing the latest commit messages. If the commit messages describe changes that would require a major, minor or patch version bump, PSR will automatically bump the version number accordingly. PSR, however, does not stop there but will help automate the whole release process. It will update the project code and distribution artifact, upload the artifact and post changelogs to a remotely hosted Version Control System (VCS). The tool is designed to run inside of a CI/CD pipeline service, but it can also be run locally. This project was originally inspired by the `semantic-release`_ project for JavaScript by *Stephan Bönnemann*, but the codebases have significantly deviated since then, as PSR as driven towards the goal of providing flexible changelogs and simple initial setup. .. include:: concepts/installation.rst Read more about the setup and configuration in our :ref:`Getting Started Guide `. .. _semantic-release: https://github.com/semantic-release/semantic-release .. |PyPI Version| image:: https://img.shields.io/pypi/v/python-semantic-release?label=PyPI&logo=pypi :target: https://pypi.org/project/python-semantic-release/ :alt: pypi .. |conda-forge Version| image:: https://img.shields.io/conda/vn/conda-forge/python-semantic-release?logo=anaconda :target: https://anaconda.org/conda-forge/python-semantic-release :alt: conda-forge .. |Last Release| image:: https://img.shields.io/github/release-date/python-semantic-release/python-semantic-release?display_date=published_at :target: https://github.com/python-semantic-release/python-semantic-release/releases/latest :alt: GitHub Release Date .. |PSR License| image:: https://img.shields.io/pypi/l/python-semantic-release?color=blue :target: https://github.com/python-semantic-release/python-semantic-release/blob/master/LICENSE :alt: PyPI - License .. 
|Issues| image:: https://img.shields.io/github/issues/python-semantic-release/python-semantic-release :target: https://github.com/python-semantic-release/python-semantic-release/issues :alt: GitHub Issues .. |Monthly Downloads| image:: https://img.shields.io/pypi/dm/python-semantic-release :target: https://pypistats.org/packages/python-semantic-release :alt: PyPI - Downloads Documentation Contents ====================== .. toctree:: :maxdepth: 1 What's New Concepts CLI configuration/index upgrading/index misc/troubleshooting API Contributing View on GitHub ---- .. _inline-getting-started-guide: .. include:: concepts/getting_started.rst :start-after: .. _getting-started-guide: python-semantic-release-10.4.1/docs/make.bat000066400000000000000000000151171506116242600207020ustar00rootroot00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . set I18NSPHINXOPTS=%SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. changes to make an overview over all changed/added/deprecated items echo. 
xml to make Docutils-native XML files echo. pseudoxml to make pseudoxml-XML files for display purposes echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) %SPHINXBUILD% 2> nul if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. 
goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\python-semantic-release.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\python-semantic-release.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdf" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdfja" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf-ja cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 
goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/locale. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) if "%1" == "xml" ( %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml if errorlevel 1 exit /b 1 echo. echo.Build finished. The XML files are in %BUILDDIR%/xml. goto end ) if "%1" == "pseudoxml" ( %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml if errorlevel 1 exit /b 1 echo. echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. goto end ) :end python-semantic-release-10.4.1/docs/misc/000077500000000000000000000000001506116242600202235ustar00rootroot00000000000000python-semantic-release-10.4.1/docs/misc/psr_changelog.rst000066400000000000000000000000411506116242600235630ustar00rootroot00000000000000.. include:: ../../CHANGELOG.rst python-semantic-release-10.4.1/docs/misc/troubleshooting.rst000066400000000000000000000026531506116242600242120ustar00rootroot00000000000000.. _troubleshooting: Troubleshooting =============== - Check your configuration file for :ref:`configuration` - Check your Git tags match your :ref:`tag_format `; tags using other formats are ignored during calculation of the next version. .. 
_troubleshooting-verbosity: Increasing Verbosity -------------------- If you are having trouble with Python Semantic Release or would like to see additional information about the actions that it is taking, you can use the top-level :ref:`cmd-main-option-verbosity` option. This can be supplied multiple times to increase the logging verbosity of the :ref:`cmd-main` command or any of its subcommands during their execution. You can supply this as many times as you like, but supplying more than twice has no effect. Supply :ref:`cmd-main-option-verbosity` once for ``INFO`` output, and twice for ``DEBUG``. For example:: semantic-release -vv version --print .. note:: The :ref:`cmd-main-option-verbosity` option must be supplied to the top-level ``semantic-release`` command, before the name of any sub-command. .. warning:: The volume of logs when using ``DEBUG`` verbosity may be significantly increased, compared to ``INFO`` or the default ``WARNING``, and as a result executing commands with ``semantic-release`` may be significantly slower when using ``DEBUG``. .. note:: The provided GitHub action sets the verbosity level to INFO by default. python-semantic-release-10.4.1/docs/upgrading/000077500000000000000000000000001506116242600212505ustar00rootroot00000000000000python-semantic-release-10.4.1/docs/upgrading/08-upgrade.rst000066400000000000000000000513141506116242600236620ustar00rootroot00000000000000.. _upgrade_v8: Upgrading to v8 =============== Python Semantic Release v8.0.0 introduced a number of breaking changes. The internals have been changed significantly to better support highly-requested features and to streamline the maintenance of the project. As a result, certain things have been removed, reimplemented differently, or now exhibit different behavior to earlier versions of Python Semantic Release. This page is a guide to help projects to ``pip install python-semantic-release>=8.0.0`` with fewer surprises. .. 
_upgrade_v8-github-action: Python Semantic Release GitHub Action ------------------------------------- .. _upgrade_v8-removed-artefact-upload: GitHub Action no longer publishes artifacts to PyPI or GitHub Releases """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" Python Semantic Release no longer uploads distributions to PyPI - see :ref:`upgrade_v8-commands-repurposed-version-and-publish`. If you are using Python Semantic Release to publish release notes and artifacts to GitHub releases, there is a new GitHub Action `upload-to-gh-release`_ which will perform this action for you. This means the following workflows perform the same actions, and if you are using the former, you will need to modify your workflow to include the steps in the latter. This workflow is written to use Python Semantic Release v7.33.5: .. code:: yaml --- name: Semantic Release on: push: branches: - main jobs: release: runs-on: ubuntu-latest concurrency: release steps: - uses: actions/checkout@v3 with: fetch-depth: 0 # This action uses Python Semantic Release v7 - name: Python Semantic Release uses: python-semantic-release/python-semantic-release@v7.33.5 with: github_token: ${{ secrets.GITHUB_TOKEN }} repository_username: __token__ repository_password: ${{ secrets.PYPI_TOKEN }} The following workflow achieves the same result using Python Semantic Release v8, the `upload-to-gh-release`_ GitHub Action, and the `pypa/gh-action-pypi-publish`_ GitHub Action: .. 
code:: yaml --- name: Semantic Release on: push: branches: - main jobs: release: runs-on: ubuntu-latest concurrency: release permissions: id-token: write steps: - uses: actions/checkout@v3 with: fetch-depth: 0 # This action uses Python Semantic Release v8 - name: Python Semantic Release id: release uses: python-semantic-release/python-semantic-release@v8.7.0 with: github_token: ${{ secrets.GITHUB_TOKEN }} - name: Publish package distributions to PyPI uses: pypa/gh-action-pypi-publish@v1 # NOTE: DO NOT wrap the conditional in ${{ }} as it will always evaluate to true. # See https://github.com/actions/runner/issues/1173 if: steps.release.outputs.released == 'true' - name: Publish package distributions to GitHub Releases uses: python-semantic-release/upload-to-gh-release@v8.7.0 if: steps.release.outputs.released == 'true' with: github_token: ${{ secrets.GITHUB_TOKEN }} .. _upload-to-gh-release: https://github.com/python-semantic-release/upload-to-gh-release .. _pypa/gh-action-pypi-publish: https://github.com/pypa/gh-action-pypi-publish .. _upgrade_v8-github-action-removed-pypi-token: Removal of ``pypi_token``, ``repository_username`` and ``repository_password`` inputs """"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" Since the library no longer supports publishing to PyPI, the ``pypi_token``, ``repository_username`` and ``repository_password`` inputs of the GitHub action have all been removed. See the above section for how to publish to PyPI using the official GitHub Action from the Python Packaging Authority (PyPA). .. _upgrade_v8-options-inputs: Rename ``additional_options`` to ``root_options`` """"""""""""""""""""""""""""""""""""""""""""""""" Because the purposes of the :ref:`cmd-version` and :ref:`cmd-publish` commands have changed, the GitHub action now performs both commands in sequence. 
For this reason, and because the usage of the CLI has changed, ``additional_options`` has been renamed to ``root_options`` to reflect the fact that the options are for the main :ref:`cmd-main` command group. .. _upgrade_v8-commands: Commands -------- .. _upgrade_v8-commands-repurposed-version-and-publish: Repurposing of ``version`` and ``publish`` commands """"""""""""""""""""""""""""""""""""""""""""""""""" Python Semantic Release's primary purpose is to enable automation of correct semantic versioning for software projects. Over the years, this automation has been extended to include other actions such as building/publishing the project and its artifacts to artefact repositories, creating releases in remote version control systems, and writing changelogs. In Python Semantic Release <8.0.0, the ``publish`` command was a one-stop-shop for performing every piece of automation provided. This has been changed - the ``version`` command now handles determining the next version, applying the changes to the project metadata according to the configuration, writing a changelog, and committing/pushing changes to the remote Git repository. It also handles creating a release in the remote VCS. It does *not* publish software artifacts to remote repositories such as PyPI; the rationale behind this decision is simply that under the hood, Python Semantic Release used `twine`_ to upload artifacts to package indexes such as PyPI, and it's recommended to use twine directly via the command-line. From the twine `documentation `_: Twine is a command-line tool for interacting with PyPI securely over HTTPS. As a result Python Semantic Release no longer depends on twine internals. The ``publish`` command now handles publishing software artifacts to releases in the remote version control system. .. _twine: https://twine.readthedocs.io/en/stable .. _twine upload: https://twine.readthedocs.io/en/stable/#twine-upload To achieve a similar flow of logic such as 1. Determine the next version 2. 
Write this version to the configured metadata locations 3. Write the changelog 4. Push the changes to the metadata and changelog to the remote repository 5. Create a release in the remote version control system 6. Build a wheel 7. Publish the wheel to PyPI 8. Publish the distribution artifacts to the release in the remote VCS You should run:: semantic-release version twine upload dist/* # or whichever path your distributions are placed in semantic-release publish With steps 1-6 being handled by the :ref:`cmd-version` command, step 7 being left to the developer to handle, and lastly step 8 to be handled by the :ref:`cmd-publish` command. .. _upgrade_v8-removed-define-option: Removal of ``-D/--define`` command-line option """""""""""""""""""""""""""""""""""""""""""""" It is no longer possible to override arbitrary configuration values using the ``-D``/ ``--define`` option. You should provide the appropriate values via a configuration file using :ref:`cmd-main-option-config` or via the available command-line options. This simplifies the command-line option parsing significantly and is less error-prone, which has resulted in previous issues (e.g. `#600`_) with overrides on the command-line. Some of the configuration values expected by Python Semantic Release use complex data types such as lists or nested structures, which would be tedious and error-prone to specify using just command-line options. .. _#600: https://github.com/python-semantic-release/python-semantic-release/issues/600 .. _upgrade_v8-commands-no-verify-ci: Removal of CI verifications """"""""""""""""""""""""""" Prior to v8, Python Semantic Release would perform some prerequisite verification of environment variables before performing any version changes using the ``publish`` command. It's not feasible for Python Semantic Release to verify any possible CI environment fully, and these checks were only triggered if certain environment variables were set - they wouldn't fail locally. 
These checks previously raised :py:class:``semantic_release.CiVerificationError``, and were the only place in which this custom exception was used. Therefore, this exception has **also** been removed from Python Semantic Release in v8. If you were relying on this functionality, it's recommended that you add the following shell commands *before* invoking ``semantic-release`` to verify your environment: .. note:: In the following, $RELEASE_BRANCH refers to the git branch against which you run your releases using Python Semantic Release. You will need to ensure it is set properly (e.g. via ``export RELEASE_BRANCH=main`` and/or replace the variable with the branch name you want to verify the CI environment for. .. _upgrade_v8-commands-no-verify-ci-travis: Travis ~~~~~~ **Condition**: environment variable ``TRAVIS=true`` **Replacement**: .. code-block:: bash if ! [[ $TRAVIS_BRANCH == $RELEASE_BRANCH && \ $TRAVIS_PULL_REQUEST == 'false' ]]; then exit 1 fi .. _upgrade_v8-commands-no-verify-ci-semaphore: Semaphore ~~~~~~~~~ **Condition**: environment variable ``SEMAPHORE=true`` **Replacement**: .. code-block:: bash if ! [[ $BRANCH_NAME == $RELEASE_BRANCH && \ $SEMAPHORE_THREAD_RESULT != 'failed' && \ -n $PULL_REQUEST_NUMBER ]]; then exit 1 fi .. _upgrade_v8-commands-no-verify-ci-frigg: Frigg ~~~~~ **Condition**: environment variable ``FRIGG=true`` **Replacement**: .. code-block:: bash if ! [[ $FRIGG_BUILD_BRANCH == $RELEASE_BRANCH && \ -n $FRIGG_PULL_REQUEST ]]; then exit 1 fi .. _upgrade_v8-commands-no-verify-ci-circle-ci: Circle CI ~~~~~~~~~ **Condition**: environment variable ``CIRCLECI=true`` **Replacement**: .. code-block:: bash if ! [[ $CIRCLE_BRANCH == $RELEASE_BRANCH && \ -n $CI_PULL_REQUEST ]]; then exit 1 fi .. _upgrade_v8-commands-no-verify-ci-gitlab-ci: GitLab CI ~~~~~~~~~ **Condition**: environment variable ``GITLAB_CI=true`` **Replacement**: .. code-block:: bash if ! [[ $CI_COMMIT_REF_NAME == $RELEASE_BRANCH ]]; then exit 1 fi .. 
_upgrade_v8-commands-no-verify-ci-bitbucket: **Condition**: environment variable ``BITBUCKET_BUILD_NUMBER`` is set **Replacement**: .. code-block:: bash if ! [[ $BITBUCKET_BRANCH == $RELEASE_BRANCH && \ -n $BITBUCKET_PR_ID ]]; then exit 1 fi .. _upgrade_v8-commands-no-verify-ci-jenkins: Jenkins ~~~~~~~ **Condition**: environment variable ``JENKINS_URL`` is set **Replacement**: .. code-block:: bash if [[ -z $BRANCH_NAME ]]; then BRANCH_NAME=$BRANCH_NAME elif [[ -z $GIT_BRANCH ]]; then BRANCH_NAME=$GIT_BRANCH fi if ! [[ $BRANCH_NAME == $RELEASE_BRANCH && \ -n $CHANGE_ID ]]; then exit 1 fi .. _upgrade_v8-removed-build-status-checking: Removal of Build Status Checking """""""""""""""""""""""""""""""" Prior to v8, Python Semantic Release contained a configuration option, ``check_build_status``, which would attempt to prevent a release being made if it was possible to identify that a corresponding build pipeline was failing. For similar reasons to those motivating the removal of :ref:`CI Checks `, this feature has also been removed. If you are leveraging this feature in Python Semantic Release v7, the following bash commands will replace the functionality, and you can add these to your pipeline. You will need to install ``jq`` and ``curl`` to run these commands; they can be easily installed through your system's package manager, for example on Ubuntu: .. code-block:: bash sudo apt update && sudo apt upgrade sudo apt install -y curl jq On Windows, you can refer to the `installation guide for jq`_, and if ``curl`` is not already installed, you can download it from `the curl website`_ .. _installation guide for jq: https://jqlang.github.io/jq/download/ .. _the curl website: https://curl.se/ .. _upgrade_v8-removed-build-status-checking-github: GitHub ~~~~~~ .. 
code-block:: bash export RESP="$( curl \ -H "Authorization: token $GITHUB_TOKEN" \ -fSsL https://$GITHUB_API_DOMAIN/repos/$REPO_OWNER/$REPO_NAME/commits/$(git rev-parse HEAD)/status || exit 1 )" if [ $(jq -r '.state' <<< "$RESP") != "success" ]; then echo "Build status is not success" >&2 exit 1 fi Note that ``$GITHUB_API_DOMAIN`` is typically ``api.github.com`` unless you are using GitHub Enterprise with a custom domain name. .. _upgrade_v8-removed-build-status-checking-gitea: Gitea ~~~~~ .. code-block:: bash export RESP="$( curl \ -H "Authorization: token $GITEA_TOKEN" \ -fSsL https://$GITEA_DOMAIN/repos/$REPO_OWNER/$REPO_NAME/statuses/$(git rev-parse HEAD) || exit 1 )" if [ $(jq -r '.state' <<< "$RESP") != "success" ]; then echo "Build status is not success" >&2 exit 1 fi .. _upgrade_v8-removed-build-status-checking-gitlab: Gitlab ~~~~~~ .. code-block:: bash export RESP="$( curl \ -H "Authorization: token $GITLAB_TOKEN" \ -fSsL https://$GITLAB_DOMAIN/api/v4/projects/$PROJECT_ID/repository/commits/$(git rev-parse HEAD)/statuses )" for line in $(jq -r '.[] | [.name, .status, .allow_failure] | join("|")' <<<"$RESP"); do IFS="|" read -r job_name job_status allow_failure <<<"$line" if [ "$job_status" == "pending" ]; then echo "job $job_name is pending" >&2 exit 1 elif [ "$job_status" == "failed" ] && [ ! "$allow_failure" == "true" ]; then echo "job $job_name failed" >&2 exit 1 fi done .. _upgrade_v8-commands-multibranch-releases: Multibranch releases """""""""""""""""""" Prior to v8, Python Semantic Release would perform ``git checkout`` to switch to your configured release branch and determine if a release would need to be made. In v8 this has been changed - you must manually check out the branch which you would like to release against, and if you would like to create releases against this branch you must also ensure that it belongs to a :ref:`release group `. .. 
_upgrade_v8-commands-changelog: ``changelog`` command """"""""""""""""""""" A new option, :ref:`cmd-changelog-option-post-to-release-tag` has been added. If you omit this argument on the command line then the changelog rendering process, which is described in more detail at :ref:`changelog-templates-template-rendering`, will be triggered, but the new changelog will not be posted to any release. If you use this new command-line option, it should be set to a tag within the remote which has a corresponding release. For example, to update the changelog and post it to the release corresponding to the tag ``v1.1.4``, you should run:: semantic-release changelog --post-to-release-tag v1.1.4 .. _upgrade_v8-changelog-customization: Changelog customization """"""""""""""""""""""" A number of options relevant to customizing the changelog have been removed. This is because Python Semantic Release now supports authoring a completely custom `Jinja`_ template with the contents of your changelog. Historically, the number of options added to Python Semantic Release in order to allow this customization has grown significantly; it now uses templates in order to fully open up customizing the changelog's appearance. .. _Jinja: https://jinja.palletsprojects.com/en/3.1.x/ .. _upgrade_v8-configuration: Configuration ------------- The configuration structure has been completely reworked, so you should read :ref:`configuration` carefully during the process of upgrading to v8+. However, some common pitfalls and potential sources of confusion are summarized here. .. _upgrade_v8-configuration-setup-cfg-unsupported: ``setup.cfg`` is no longer supported """""""""""""""""""""""""""""""""""" Python Semantic Release no longer supports configuration via ``setup.cfg``. 
This is because the Python ecosystem is centering around ``pyproject.toml`` as universal tool and project configuration file, and TOML allows expressions via configuration, such as the mechanism for declaring configuration via environment variables, which introduce much greater complexity to support in the otherwise equivalent ``ini``-format configuration. You can use :ref:`cmd-generate-config` to generate new-format configuration that can be added to ``pyproject.toml``, and adjust the default settings according to your needs. .. warning:: If you don't already have a ``pyproject.toml`` configuration file, ``pip`` can change its behavior once you add one, as a result of `PEP-517`_. If you find that this breaks your packaging, you can add your Python Semantic Release configuration to a separate file such as ``semantic-release.toml``, and use the :ref:`--config ` option to reference this alternative configuration file. More detail about this issue can be found in this `pip issue`_. .. _PEP-517: https://peps.python.org/pep-0517/#evolutionary-notes .. _pip issue: https://github.com/pypa/pip/issues/8437#issuecomment-805313362 .. _upgrade_v8-commit-parser-options: Commit parser options """"""""""""""""""""" Options such as ``major_emoji``, ``parser_angular_patch_types`` or ``parser_angular_default_level_bump`` have been removed. Instead, these have been replaced with a single set of recognized commit parser options, ``allowed_tags``, ``major_tags``, ``minor_tags``, and ``patch_tags``, though the interpretation of these is up to the specific parsers in use. You can read more detail about using commit parser options in :ref:`commit_parser_options `, and if you need to parse multiple commit styles for a single project it's recommended that you create a parser following :ref:`commit_parser-custom_parser` that is tailored to the specific needs of your project. .. 
_upgrade_v8-version-variable-rename: ``version_variable`` """""""""""""""""""" This option has been renamed to :ref:`version_variables ` as it refers to a list of variables which can be updated. .. _upgrade_v8-version-pattern-removed: ``version_pattern`` """"""""""""""""""" This option has been removed. It's recommended to use an alternative tool to perform substitution using arbitrary regular expressions, such as ``sed``. You can always use Python Semantic Release to identify the next version to be created for a project and store this in an environment variable like so:: export VERSION=$(semantic-release version --print) .. _upgrade_v8-version-toml-type: ``version_toml`` """""""""""""""" This option will no longer accept a string or comma-separated string of version locations to be updated in TOML files. Instead, you must supply a ``List[str]``. For existing configurations using a single location in this option, you can simply wrap the value in ``[]``: .. code-block:: toml # Python Semantic Release v7 configuration [tool.semantic_release] version_toml = "pyproject.toml:tool.poetry.version" # Python Semantic Release v8 configuration [tool.semantic_release] version_toml = ["pyproject.toml:tool.poetry.version"] .. _upgrade_v8-tag-format-validation: ``tag_format`` """""""""""""" This option has the same effect as it did in Python Semantic Release prior to v8, but Python Semantic Release will now verify that it has a ``{version}`` format key and raise an error if this is not the case. .. _upgrade_v8-upload-to-release-rename: ``upload_to_release`` """"""""""""""""""""" This option has been renamed to :ref:`upload_to_vcs_release `. .. 
_upgrade_v8-custom-commit-parsers: Custom Commit Parsers --------------------- Previously, a custom commit parser had to satisfy the following criteria: * It should be ``import``-able from the virtual environment where the ``semantic-release`` is run * It should be a function which accepts the commit message as its only argument and returns a :py:class:`semantic_release.history.parser_helpers.ParsedCommit` if the commit is parsed successfully, or raise a :py:class:`semantic_release.UnknownCommitMessageStyleError` if parsing is unsuccessful. It is still possible to implement custom commit parsers, but the interface for doing so has been modified with stronger support for Python type annotations and broader input provided to the parser to enable capturing more information from each commit, such as the commit's date and author, if desired. A full guide to implementing a custom commit parser can be found at :ref:`commit_parser-custom_parser`. python-semantic-release-10.4.1/docs/upgrading/09-upgrade.rst000066400000000000000000000007061506116242600236620ustar00rootroot00000000000000.. _upgrade_v9: Upgrading to v9 =============== You are in luck! The upgrade to ``v9`` is a simple one. The breaking change for this version is the removal of support for **Python 3.7**, as it has passed End-Of-Life (EOL). This means that if you are using Python 3.7, you will need to upgrade to at least Python 3.8 in order to use ``v9``. This will be permanent as all future versions of ``python-semantic-release`` will require Python 3.8 or later. python-semantic-release-10.4.1/docs/upgrading/10-upgrade.rst000066400000000000000000000202071506116242600236500ustar00rootroot00000000000000.. _upgrade_v10: Upgrading to v10 ================ The upgrade to v10 is primarily motivated by a command injection security vulnerability found in the GitHub Actions configuration interpreter (see details :ref:`below `). 
We also bundled a number of other changes, including new default configuration values and most importantly, a return to 1-line commit subjects in the default changelog format. For more specific change details for v10, please refer to the :ref:`changelog-v10.0.0` section of the :ref:`changelog`. .. _upgrade_v10-root_options: Security Fix: Command Injection Vulnerability (GitHub Actions) -------------------------------------------------------------- In the previous versions of the GitHub Actions configuration, we used a single ``root_options`` parameter to pass any options you wanted to pass to the ``semantic-release`` main command. This parameter was interpreted as a string and passed directly to the command line, which made it vulnerable to command injection attacks. An attacker could exploit this by crafting a malicious string as the :ref:`gh_actions-psr-inputs-root_options` input, and then it would be executed as part of the command line, potentially allowing them to run arbitrary commands within the GitHub Actions Docker container. The ability to exploit this vulnerability is limited to people whom can modify the GitHub Actions workflow file, which is typically only the repository maintainers unless you are pointing at an organizational workflow file or another third-party workflow file. To mitigate this vulnerability, we have removed the ``root_options`` parameter completely and replaced it with individual boolean flag inputs which are then used to select the proper cli parameters for the ``semantic-release`` command. Additionally, users can protect themselves by limiting the access to secrets in their GitHub Actions workflows and the permissions of the GitHub Actions CI TOKEN. This vulnerability existed in both the :ref:`python-semantic-release/python-semantic-release ` and :ref:`python-semantic-release/publish-action ` actions. 
For the main :ref:`python-semantic-release/python-semantic-release ` action, the following inputs are now available (in place of the old ``root_options`` parameter): :ref:`gh_actions-psr-inputs-config_file`, :ref:`gh_actions-psr-inputs-noop`, :ref:`gh_actions-psr-inputs-strict`, and :ref:`gh_actions-psr-inputs-verbosity`. **Example migration** If you previously had the following in your GitHub Actions workflow file: .. code:: yaml - uses: python-semantic-release/python-semantic-release@v9 with: root_options: "-vv --strict" It would be updated to: .. code:: yaml - uses: python-semantic-release/python-semantic-release@v10 with: strict: true verbosity: 2 For the :ref:`python-semantic-release/publish-action ` action, the following inputs are now available (in place of the old ``root_options`` parameter): :ref:`gh_actions-publish-inputs-config_file`, :ref:`gh_actions-publish-inputs-noop`, and :ref:`gh_actions-publish-inputs-verbosity`. **Example migration** If you previously had the following in your GitHub Actions workflow file: .. code:: yaml - uses: python-semantic-release/publish-action@v9 with: root_options: "-v -c /path/to/releaserc.yaml" It would be updated to: .. code:: yaml - uses: python-semantic-release/publish-action@v10 with: config_file: /path/to/releaserc.yaml verbosity: 1 .. _upgrade_v10-changelog_format-1_line_commit_subjects: Changelog Format: 1-Line Commit Subjects ---------------------------------------- In v10, the default changelog format has been changed to use 1-line commit subjects instead of including the full commit message. This change was made to improve the readability of the changelog as many commit messages are long and contain unnecessary details for the changelog. .. important:: If you use a squash commit merge strategy, it is recommended that you use the default ``parse_squash_commits`` commit parser option to ensure that all the squashed commits are parsed for version bumping and changelog generation. 
This is the default behavior in v10 across all supported commit parsers. If you are upgrading, you likely will need to manually set this option in your configuration file to ensure that the changelog is generated correctly. If you do not enable ``parse_squash_commits``, then version will only be determined by the commit subject line and the changelog will only include the commit subject line as well. .. _upgrade_v10-changelog_format-mask_initial_release: Changelog Format: Mask Initial Release -------------------------------------- In v10, the default behavior for the changelog generation has been changed to mask the initial release in the changelog. This means that the first release will not contain a break down of the different types of changes (e.g., features, fixes, etc.), but instead it will just simply state that this is the initial release. .. _upgrade_v10-changelog_format-commit_parsing: Changelog Format: Commit Parsing -------------------------------- We have made some minor changes to the commit parsing logic in *v10* to separate out components of the commit message more clearly. You will find that the :py:class:`ParsedCommit ` object's descriptions list will no longer contain any Breaking Change footers, Release Notice footers, PR/MR references, or Issue Closure footers. These were all previously extracted and placed into their own attributes but were still included in the descriptions list. In *v10*, the descriptions list will only contain the actual commit subject line and any additional commit body text that is not part of the pre-defined footers. If you were relying on the descriptions list to contain these footers, you will need to update your code and changelog templates to reference the specific attributes you want to use. .. _upgrade_v10-default_config: Default Configuration Changes ----------------------------- The following table summarizes the changes to the default configuration values in v10: .. 
list-table:: :widths: 5 55 20 20 :header-rows: 1 * - # - Configuration Option - Previous Default Value - New Default Value * - 1 - :ref:`config-allow_zero_version` - ``true`` - ``false`` * - 2 - :ref:`changelog.mode ` - ``init`` - ``update`` * - 3 - :ref:`changelog.default_templates.mask_initial_release ` - ``false`` - ``true`` * - 4 - :ref:`commit_parser_options.parse_squash_commits ` - ``false`` - ``true`` * - 5 - :ref:`commit_parser_options.ignore_merge_commits ` - ``false`` - ``true`` .. _upgrade_v10-deprecations: Deprecations & Removals ----------------------- No additional deprecations were made in *v10*, but the following are staged for removal in v11: .. list-table:: Deprecated Features & Functions :widths: 5 30 10 10 45 :header-rows: 1 * - # - Component - Deprecated - Planned Removal - Notes * - 1 - :ref:`GitHub Actions root_options ` - v10.0.0 - v10.0.0 - Replaced with individual boolean flag inputs. See :ref:`above ` for details. * - 2 - :ref:`Angular Commit Parser ` - v9.19.0 - v11.0.0 - Replaced by the :ref:`Conventional Commit Parser `. * - 3 - :ref:`Tag Commit Parser ` - v9.12.0 - v11.0.0 - Replaced by the :ref:`Emoji Commit Parser `. .. note:: For the most up-to-date information on the next version deprecations and removals, please refer to the issue `#1066 `_. python-semantic-release-10.4.1/docs/upgrading/index.rst000066400000000000000000000016441506116242600231160ustar00rootroot00000000000000.. _upgrading: ============= Upgrading PSR ============= Upgrading PSR is a process that may involve several steps, depending on the version you are upgrading from and to. This section provides a guide for upgrading from older versions of PSR to the latest version. .. important:: If you are upgrading across **more than one** major version, you should incrementally upgrade through each major version and its configuration update guide to ensure a smooth transition. 
For example, if you are upgrading from v7 to v10, you should first upgrade to v8 and then to v9, and then lastly to v10 while following the upgrade guide for each version. At each step you should confirm execution works as expected before proceeding to the next version. .. toctree:: :caption: Upgrade Guides :maxdepth: 1 Upgrading to v10 <10-upgrade> Upgrading to v9 <09-upgrade> Upgrading to v8 <08-upgrade> python-semantic-release-10.4.1/pyproject.toml000066400000000000000000000247511506116242600212650ustar00rootroot00000000000000# Ref: https://packaging.python.org/en/latest/specifications/declaring-project-metadata/ # and https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html [build-system] requires = ["setuptools >= 75.3.0, < 81.0.0", "wheel ~= 0.42"] build-backend = "setuptools.build_meta" [project] name = "python-semantic-release" version = "10.4.1" description = "Automatic Semantic Versioning for Python projects" requires-python = "~= 3.8" license = { text = "MIT" } classifiers = [ "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", ] readme = "README.rst" authors = [{ name = "Rolf Erik Lekang", email = "me@rolflekang.com" }] dependencies = [ "click ~= 8.1.0", "click-option-group ~= 0.5", "gitpython ~= 3.0", "requests ~= 2.25", "jinja2 ~= 3.1", "python-gitlab >= 4.0.0, < 7.0.0", "tomlkit ~= 0.11", "dotty-dict ~= 1.3", "importlib-resources ~= 6.0", "pydantic ~= 2.0", "rich ~= 14.0", "shellingham ~= 1.5", "Deprecated ~= 1.2", # Backport of deprecated decorator for python 3.8 ] [project.scripts] python-semantic-release = "semantic_release.__main__:main" semantic-release = "semantic_release.__main__:main" psr = "semantic_release.__main__:main" [project.urls] changelog = 
"https://github.com/python-semantic-release/python-semantic-release/blob/master/CHANGELOG.md" documentation = "https://python-semantic-release.readthedocs.io" homepage = "https://python-semantic-release.readthedocs.io" issues = "https://github.com/python-semantic-release/python-semantic-release/issues" repository = "http://github.com/python-semantic-release/python-semantic-release.git" [project.optional-dependencies] build = [ "build ~= 1.2" ] docs = [ "Sphinx ~= 6.0", "sphinxcontrib-apidoc == 0.5.0", "sphinx-autobuild == 2024.2.4", "furo ~= 2024.1", ] test = [ "coverage[toml] ~= 7.0", "filelock ~= 3.15", "flatdict ~= 4.0", "freezegun ~= 1.5", "pyyaml ~= 6.0", "pytest ~= 8.3", "pytest-clarity ~= 1.0", "pytest-cov >= 5.0.0, < 7.0.0", "pytest-env ~= 1.0", "pytest-lazy-fixtures ~= 1.1.1", "pytest-mock ~= 3.0", "pytest-order ~= 1.3", "pytest-pretty ~= 1.2", "pytest-xdist ~= 3.0", "responses ~= 0.25.0", "requests-mock ~= 1.10", ] dev = [ "pre-commit ~= 3.5", "tox ~= 4.11", "ruff == 0.6.1" ] mypy = [ "mypy == 1.16.1", "types-Deprecated ~= 1.2", "types-requests ~= 2.32.0", "types-pyyaml ~= 6.0", ] [tool.setuptools] include-package-data = true [tool.setuptools.packages.find] where = ["src"] [tool.pytest.ini_options] env = [ "PYTHONHASHSEED = 123456" ] addopts = [ # TO DEBUG in single process, swap auto to 0 # "-nauto", # "-n0", "-ra", "--diff-symbols", "--durations=20", # No default coverage - causes problems with debuggers # "--cov=semantic_release", # "--cov-context=test", # "--cov-report=html:coverage-html", # "--cov-report=term-missing", ] testpaths = [ "tests" ] markers = [ "unit: mark a test as a unit test", "e2e: mark a test as a end-to-end test", "comprehensive: mark a test as a comprehensive (multiple variations) test", ] [tool.coverage.html] show_contexts = true [tool.coverage.run] omit = ["*/tests/*"] [tool.bandit] targets = ["semantic_release"] [tool.tox] legacy_tox_ini = """ [tox] envlist = mypy, py{38,39,310,311,312}, coverage ruff skipsdist = True [testenv] 
passenv = CI setenv = PYTHONPATH = {toxinidir} TESTING = True deps = .[test] commands = coverage run -p --source=semantic_release -m pytest {posargs:tests} [testenv:mypy] deps = .[mypy] commands = mypy . [testenv:coverage] deps = coverage[toml] commands = coverage combine coverage report -m coverage xml [testenv:ruff] deps = .[dev] commands = ruff check . --statistics --output-format=text """ [tool.mypy] python_version = "3.8" show_column_numbers = true show_error_context = true pretty = true error_summary = true follow_imports = "normal" enable_error_code = ["ignore-without-code"] disallow_untyped_calls = true # warn_return_any = true strict_optional = true warn_no_return = true warn_redundant_casts = true # warn_unused_ignores = true warn_unused_configs = true # warn_unreachable = true disallow_untyped_defs = true check_untyped_defs = true cache_dir = "/dev/null" plugins = ["pydantic.mypy"] [[tool.mypy.overrides]] module = "tests.*" disallow_untyped_defs = false [[tool.mypy.overrides]] module = "flatdict" ignore_missing_imports = true [[tool.mypy.overrides]] module = "shellingham" ignore_missing_imports = true [[tool.mypy.overrides]] module = "dotty_dict" ignore_missing_imports = true [tool.ruff] line-length = 88 target-version = "py38" force-exclude = true output-format = "grouped" show-fixes = true src = ["semantic_release", "tests"] [tool.ruff.lint] select = ["ALL"] # See https://docs.astral.sh/ruff/rules/ # for any of these codes you can also run `ruff rule [CODE]` # which explains it in the terminal ignore = [ # attribute shadows builtin (e.g. 
Foo.list()) "A003", # Annotations (flake8-annotations) # missing "self" type-hint "ANN101", "ANN102", "ANN401", # flake8-bugbear "B019", # flake8-commas "COM", # Missing docstrings - eventually want to enable "D100", "D101", "D102", "D103", "D104", "D105", "D107", "D203", "D205", "D212", "D400", "D401", "D404", "D415", # flake8-datetimez "DTZ", # flake8-errmsg "EM", # Some todos and some examples; leave this disabled for now "ERA001", # don't compare types, use isinstance() # sometimes using type(x) == y is deliberately chosen to exclude # subclasses "E721", # flake8-fixme "FIX", # flake8-boolean-trap "FBT", # No implicit packages "INP001", # Errors should end with "Error" "N818", # mypy prevents blanket-type-ignore "PGH003", # Fixtures that do not return a value need an underscore prefix. The rule # does not handle generators. "PT004", # flake8-pytest-style, values rowtype (list|tuple) "PT007", # pytest.raises needs a match - eventually want to enable "PT011", "PT012", "PT013", # pylint "PLR", "PLE1507", # flake8-use-pathlib "PTH", # flake8-raise "RSE", # ruff # This seems to flag a load of false-positives, thinking that the # noqa's are # unneeded and trying to fix them even though ruff then flags the errors that # were previously ignored "RUF100", # flake8-todos "TD002", "TD003", # tryceratops "TRY003", "TRY401", # other errors that conflict with ruff format # indentation-with-invalid-multiple "W191", "E111", "E114", "E117", "E501", "D206", "D300", "Q000", "Q001", "Q002", "Q003", "COM812", "COM812", "ISC001", "ISC002", ] external = ["V"] task-tags = ["NOTE", "TODO", "FIXME", "XXX"] [tool.ruff.format] docstring-code-format = true indent-style = "space" line-ending = "lf" quote-style = "double" [tool.ruff.lint.per-file-ignores] # Imported but unused "__init__.py" = ["F401"] # pydantic 1 can't handle __future__ annotations-enabled syntax on < 3.10 "src/semantic_release/cli/config.py" = ["UP", "TCH"] "src/semantic_release/commit_parser/*" = ["UP", "FA", "TCH"] # 
Method argument not used. This is mostly a base class # anyway "src/semantic_release/hvcs/_base.py" = ["ARG002"] # from tests.fixtures import * is deliberate "tests/conftest.py" = ["F403"] "tests/fixtures/**/__init__.py" = ["F403"] "tests/*" = [ # unused arguments - likely fixtures to be moved to # @pytest.mark.usefixtures "ARG001", # "assert" used "S101", # possible hard-coded password assigned to argument # because we use "prerelease_token=..." and bandit panics # when it sees *_token variables "S105", "S106", "S107", # pseudo-random generators not suitable for cryptographic purposes # (e.g. random.choice used) "S311", # Private member access "SLF001", # Annotations "ANN", # Using format instead of f-string for readablity "UP032", ] [tool.ruff.lint.mccabe] max-complexity = 10 [tool.ruff.lint.flake8-implicit-str-concat] allow-multiline = true [tool.ruff.lint.flake8-quotes] inline-quotes = "double" multiline-quotes = "double" [tool.ruff.lint.flake8-tidy-imports] ban-relative-imports = "all" [tool.ruff.lint.flake8-type-checking] strict = true [tool.ruff.lint.flake8-pytest-style] fixture-parentheses = false mark-parentheses = false parametrize-names-type = "csv" [tool.ruff.lint.isort] # required-imports = ["from __future__ import annotations"] combine-as-imports = true known-first-party = ["semantic_release"] forced-separate = ["tests"] relative-imports-order = "closest-to-furthest" section-order = [ "future", "standard-library", "third-party", "first-party", "tests", "local-folder", ] sections = { "tests" = ["tests"] } [tool.vulture] ignore_names = ["change_to_ex_proj_dir", "init_example_project"] [tool.semantic_release] logging_use_named_masks = true commit_parser = "conventional" commit_parser_options = { parse_squash_commits = true, ignore_merge_commits = true } build_command = """ python -m pip install -e .[build] python -m build . 
""" major_on_zero = true version_variables = [ "src/gh_action/requirements.txt:python-semantic-release:nf", "docs/configuration/automatic-releases/github-actions.rst:python-semantic-release/python-semantic-release:tf", "docs/configuration/automatic-releases/github-actions.rst:python-semantic-release/publish-action:tf", ] version_toml = ["pyproject.toml:project.version"] [tool.semantic_release.changelog] # default_templates = { changelog_file = "CHANGELOG.rst" } exclude_commit_patterns = [ '''chore(?:\([^)]*?\))?: .+''', '''ci(?:\([^)]*?\))?: .+''', '''refactor(?:\([^)]*?\))?: .+''', '''style(?:\([^)]*?\))?: .+''', '''test(?:\([^)]*?\))?: .+''', '''build\((?!deps\): .+)''', '''Merged? .*''', '''Initial Commit.*''', # Old semantic-release version commits '''^\d+\.\d+\.\d+''', ] insertion_flag = "=========\nCHANGELOG\n=========" mode = "update" template_dir = "config/release-templates" [tool.semantic_release.branches.main] match = "^(main|master)$" prerelease = false prerelease_token = "rc" [tool.semantic_release.branches.alpha] match = "^(feat|fix|perf)/.+" prerelease = true prerelease_token = "alpha" [tool.semantic_release.branches.dev] match = ".+" prerelease = true prerelease_token = "dev" [tool.semantic_release.remote] type = "github" token = { env = "GH_TOKEN" } [tool.semantic_release.publish] upload_to_vcs_release = true python-semantic-release-10.4.1/scripts/000077500000000000000000000000001506116242600200275ustar00rootroot00000000000000python-semantic-release-10.4.1/scripts/__init__.py000066400000000000000000000000001506116242600221260ustar00rootroot00000000000000python-semantic-release-10.4.1/scripts/bump_version_in_docs.py000066400000000000000000000025161506116242600246130ustar00rootroot00000000000000# ruff: noqa: T201, allow print statements in non-prod scripts from __future__ import annotations from os import getenv from pathlib import Path from re import compile as regexp # Constants PROJ_DIR = Path(__file__).resolve().parent.parent DOCS_DIR = PROJ_DIR / 
"docs" version_replace_pattern = regexp(r"\$(NEW_VERSION|{NEW_VERSION})") tag_replace_pattern = regexp(r"\$(NEW_RELEASE_TAG|{NEW_RELEASE_TAG})") def envsubst(filepath: Path, version: str, release_tag: str) -> None: file_content = filepath.read_text() found = False for pattern, replacement in [ (version_replace_pattern, version), (tag_replace_pattern, release_tag), ]: if not found and (found := bool(pattern.search(file_content))): print(f"Applying envsubst to {filepath}") file_content = pattern.sub(replacement, file_content) filepath.write_text(file_content) if __name__ == "__main__": new_release_tag = getenv("NEW_RELEASE_TAG") new_version = getenv("NEW_VERSION") if not new_release_tag: print("NEW_RELEASE_TAG environment variable is not set") exit(1) if not new_version: print("NEW_VERSION environment variable is not set") exit(1) for doc_file in DOCS_DIR.rglob("*.rst"): envsubst(filepath=doc_file, version=new_version, release_tag=new_release_tag) python-semantic-release-10.4.1/src/000077500000000000000000000000001506116242600171275ustar00rootroot00000000000000python-semantic-release-10.4.1/src/gh_action/000077500000000000000000000000001506116242600210625ustar00rootroot00000000000000python-semantic-release-10.4.1/src/gh_action/.dockerignore000066400000000000000000000001061506116242600235330ustar00rootroot00000000000000# Default, ignore everything * # Except !requirements.txt !action.sh python-semantic-release-10.4.1/src/gh_action/Dockerfile000066400000000000000000000027501506116242600230600ustar00rootroot00000000000000# This Dockerfile is only for GitHub Actions FROM python:3.13-bookworm ARG WORK_DIR="/opt/psr" WORKDIR ${WORK_DIR} ENV PSR_DOCKER_GITHUB_ACTION=true \ PYTHONDONTWRITEBYTECODE=1 \ PSR_VENV_BIN="${WORK_DIR}/.venv/bin" # Copy action utilities into container COPY . 
./ RUN \ # Install desired packages apt update && apt install -y --no-install-recommends \ # install git with git-lfs support git git-lfs \ # install python cmodule / binary module build utilities python3-dev gcc make cmake cargo \ # Configure global pip && { \ printf '%s\n' "[global]"; \ printf '%s\n' "disable-pip-version-check = true"; \ } > /etc/pip.conf \ # Create virtual environment for python-semantic-release && python3 -m venv "$(dirname "${PSR_VENV_BIN}")" \ # Update core utilities in the virtual environment && "${PSR_VENV_BIN}/pip" install --upgrade pip setuptools wheel \ # Install psr & its dependencies from source into virtual environment && "${PSR_VENV_BIN}/pip" install --pre -r requirements.txt \ # Validate binary availability && bash -c "${PSR_VENV_BIN}/semantic-release --help" \ # make action script executable && chmod +x "${WORK_DIR}/action.sh" \ # Put action script in PATH && ln -s "${WORK_DIR}/action.sh" /usr/local/bin/action-entrypoint \ # Clean up && apt clean && rm -rf /var/lib/apt/lists/* \ && find /tmp -mindepth 1 -delete ENTRYPOINT ["/usr/local/bin/action-entrypoint"] python-semantic-release-10.4.1/src/gh_action/action.sh000066400000000000000000000143661506116242600227050ustar00rootroot00000000000000#!/bin/bash set -e WORKSPACE_DIR="$(pwd)" explicit_run_cmd() { local cmd="" cmd="$(printf '%s' "$*" | sed 's/^ *//g' | sed 's/ *$//g')" printf '%s\n' "$> $cmd" eval "$cmd" } # Convert "true"/"false" into command line args, returns "" if not defined eval_boolean_action_input() { local -r input_name="$1" shift local -r flag_value="$1" shift local -r if_true="$1" shift local -r if_false="$1" if [ -z "$flag_value" ]; then printf "" elif [ "$flag_value" = "true" ]; then printf '%s\n' "$if_true" elif [ "$flag_value" = "false" ]; then printf '%s\n' "$if_false" else printf 'Error: Invalid value for input %s: %s is not "true" or "false\n"' \ "$input_name" "$flag_value" >&2 return 1 fi } # Convert string input into command line args, returns "" if 
undefined eval_string_input() { local -r input_name="$1" shift local -r if_defined="$1" shift local value value="$(printf '%s' "$1" | tr -d ' ')" if [ -z "$value" ]; then printf "" return 0 fi printf '%s' "${if_defined/\%s/$value}" } # Capture UID and GID of the external filesystem if [ ! -f "$WORKSPACE_DIR/.git/HEAD" ]; then echo "::error:: .git/HEAD file not found. Ensure you are in a valid git repository." exit 1 fi EXT_HOST_UID="$(stat -c '%u' "$WORKSPACE_DIR/.git/HEAD")" EXT_HOST_GID="$(stat -c '%g' "$WORKSPACE_DIR/.git/HEAD")" if [ -z "$EXT_HOST_UID" ] || [ -z "$EXT_HOST_GID" ]; then echo "Error: Unable to determine external filesystem UID/GID from .git/HEAD" exit 1 fi # Convert inputs to command line arguments ROOT_OPTIONS=() if ! printf '%s\n' "$INPUT_VERBOSITY" | grep -qE '^[0-9]+$'; then printf "Error: Input 'verbosity' must be a positive integer\n" >&2 exit 1 fi VERBOSITY_OPTIONS="" for ((i = 0; i < INPUT_VERBOSITY; i++)); do [ "$i" -eq 0 ] && VERBOSITY_OPTIONS="-" VERBOSITY_OPTIONS+="v" done ROOT_OPTIONS+=("$VERBOSITY_OPTIONS") if [ -n "$INPUT_CONFIG_FILE" ]; then # Check if the file exists if [ ! 
-f "$INPUT_CONFIG_FILE" ]; then printf "Error: Input 'config_file' does not exist: %s\n" "$INPUT_CONFIG_FILE" >&2 exit 1 fi ROOT_OPTIONS+=("$(eval_string_input "config_file" "--config %s" "$INPUT_CONFIG_FILE")") || exit 1 fi ROOT_OPTIONS+=("$(eval_boolean_action_input "strict" "$INPUT_STRICT" "--strict" "")") || exit 1 ROOT_OPTIONS+=("$(eval_boolean_action_input "no_operation_mode" "$INPUT_NO_OPERATION_MODE" "--noop" "")") || exit 1 ARGS=() # v10 Breaking change as prerelease should be as_prerelease to match ARGS+=("$(eval_boolean_action_input "prerelease" "$INPUT_PRERELEASE" "--as-prerelease" "")") || exit 1 ARGS+=("$(eval_boolean_action_input "commit" "$INPUT_COMMIT" "--commit" "--no-commit")") || exit 1 ARGS+=("$(eval_boolean_action_input "tag" "$INPUT_TAG" "--tag" "--no-tag")") || exit 1 ARGS+=("$(eval_boolean_action_input "push" "$INPUT_PUSH" "--push" "--no-push")") || exit 1 ARGS+=("$(eval_boolean_action_input "changelog" "$INPUT_CHANGELOG" "--changelog" "--no-changelog")") || exit 1 ARGS+=("$(eval_boolean_action_input "vcs_release" "$INPUT_VCS_RELEASE" "--vcs-release" "--no-vcs-release")") || exit 1 ARGS+=("$(eval_boolean_action_input "build" "$INPUT_BUILD" "" "--skip-build")") || exit 1 # Handle --patch, --minor, --major # https://stackoverflow.com/a/47541882 valid_force_levels=("prerelease" "patch" "minor" "major") if [ -z "$INPUT_FORCE" ]; then true # do nothing if 'force' input is not set elif printf '%s\0' "${valid_force_levels[@]}" | grep -Fxzq "$INPUT_FORCE"; then ARGS+=("--$INPUT_FORCE") else printf "Error: Input 'force' must be one of: %s\n" "${valid_force_levels[@]}" >&2 fi if [ -n "$INPUT_BUILD_METADATA" ]; then ARGS+=("--build-metadata $INPUT_BUILD_METADATA") fi if [ -n "$INPUT_PRERELEASE_TOKEN" ]; then ARGS+=("--prerelease-token $INPUT_PRERELEASE_TOKEN") fi # Change to configured directory cd "${INPUT_DIRECTORY}" # Set Git details if ! 
[ "${INPUT_GIT_COMMITTER_NAME:="-"}" = "-" ]; then git config --global user.name "$INPUT_GIT_COMMITTER_NAME" fi if ! [ "${INPUT_GIT_COMMITTER_EMAIL:="-"}" = "-" ]; then git config --global user.email "$INPUT_GIT_COMMITTER_EMAIL" fi if [ "${INPUT_GIT_COMMITTER_NAME:="-"}" != "-" ] && [ "${INPUT_GIT_COMMITTER_EMAIL:="-"}" != "-" ]; then # Must export this value to the environment for PSR to consume the override export GIT_COMMIT_AUTHOR="$INPUT_GIT_COMMITTER_NAME <$INPUT_GIT_COMMITTER_EMAIL>" fi # See https://github.com/actions/runner-images/issues/6775#issuecomment-1409268124 # and https://github.com/actions/runner-images/issues/6775#issuecomment-1410270956 git config --system --add safe.directory "*" if [[ -n "$INPUT_SSH_PUBLIC_SIGNING_KEY" && -n "$INPUT_SSH_PRIVATE_SIGNING_KEY" ]]; then echo "SSH Key pair found, configuring signing..." # Write keys to disk mkdir -vp ~/.ssh echo -e "$INPUT_SSH_PUBLIC_SIGNING_KEY" >>~/.ssh/signing_key.pub cat ~/.ssh/signing_key.pub echo -e "$INPUT_SSH_PRIVATE_SIGNING_KEY" >>~/.ssh/signing_key # DO NOT CAT private key for security reasons sha256sum ~/.ssh/signing_key # Ensure read only private key chmod 400 ~/.ssh/signing_key # Enable ssh-agent & add signing key eval "$(ssh-agent -s)" ssh-add ~/.ssh/signing_key # Create allowed_signers file for git if [ "${INPUT_GIT_COMMITTER_EMAIL:="-"}" = "-" ]; then echo >&2 "git_committer_email must be set to use SSH key signing!" 
exit 1 fi touch ~/.ssh/allowed_signers echo "$INPUT_GIT_COMMITTER_EMAIL $INPUT_SSH_PUBLIC_SIGNING_KEY" >~/.ssh/allowed_signers # Configure git for signing git config --global gpg.format ssh git config --global gpg.ssh.allowedSignersFile ~/.ssh/allowed_signers git config --global user.signingKey ~/.ssh/signing_key git config --global commit.gpgsign true git config --global tag.gpgsign true fi # Copy inputs into correctly-named environment variables export GH_TOKEN="${INPUT_GITHUB_TOKEN}" # normalize extra spaces into single spaces as you combine the arguments CMD_ARGS="$(printf '%s' "${ROOT_OPTIONS[*]} version ${ARGS[*]}" | sed 's/ [ ]*/ /g' | sed 's/^ *//g')" # Make sure the workspace directory is owned by the external filesystem UID/GID no matter what # This is to ensure that after the action, and a commit was created, the files are owned by the external filesystem trap "chown -R $EXT_HOST_UID:$EXT_HOST_GID '$WORKSPACE_DIR'" EXIT # Run Semantic Release (explicitly use the GitHub action version) explicit_run_cmd "$PSR_VENV_BIN/semantic-release $CMD_ARGS" python-semantic-release-10.4.1/src/gh_action/requirements.txt000066400000000000000000000000421506116242600243420ustar00rootroot00000000000000python-semantic-release == 10.4.1 python-semantic-release-10.4.1/src/semantic_release/000077500000000000000000000000001506116242600224325ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/__init__.py000066400000000000000000000024451506116242600245500ustar00rootroot00000000000000"""Python Semantic Release""" from __future__ import annotations import importlib.metadata from semantic_release.commit_parser import ( CommitParser, ParsedCommit, ParseError, ParseResult, ParseResultType, ParserOptions, ) from semantic_release.enums import LevelBump from semantic_release.errors import ( CommitParseError, InvalidConfiguration, InvalidVersion, SemanticReleaseBaseError, ) from semantic_release.version import ( Version, VersionTranslator, next_version, 
def setup_hook(argv: list[str]) -> None:
    """
    A hook to be used in setup.py to enable `python setup.py publish`.

    Invokes the semantic-release CLI when any of the supported sub-commands
    ("version", "publish", "changelog") appears in *argv* alongside at least
    one other argument; otherwise this is a no-op.

    :param argv: sys.argv
    """
    if len(argv) < 2:
        return

    if {"version", "publish", "changelog"}.isdisjoint(argv):
        return

    # Imported lazily so that merely importing this package does not pull in
    # the full CLI machinery.
    from semantic_release.cli.commands.main import main

    main()
--", file=sys.stderr) sys.exit(127) except Exception as err: # noqa: BLE001, graceful error handling across application if globals.log_level <= SemanticReleaseLogLevels.DEBUG: print(f"{err.__class__.__name__}: {err}\n", file=sys.stderr) etype, value, traceback = sys.exc_info() print( str.join( "", format_exception( etype, value, traceback, limit=None, chain=True, )[:-1], ), file=sys.stderr, ) print( str.join("\n", [f"::ERROR:: {line}" for line in str(err).splitlines()]), file=sys.stderr, ) if globals.log_level > SemanticReleaseLogLevels.DEBUG: print( "Run semantic-release in very verbose mode (-vv) to see the full traceback.", file=sys.stderr, ) sys.exit(1) if __name__ == "__main__": main() python-semantic-release-10.4.1/src/semantic_release/changelog/000077500000000000000000000000001506116242600243615ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/changelog/__init__.py000066400000000000000000000004061506116242600264720ustar00rootroot00000000000000from semantic_release.changelog.context import ( ChangelogContext, make_changelog_context, ) from semantic_release.changelog.release_history import ReleaseHistory from semantic_release.changelog.template import ( environment, recursive_render, ) python-semantic-release-10.4.1/src/semantic_release/changelog/context.py000066400000000000000000000135441506116242600264260ustar00rootroot00000000000000from __future__ import annotations import logging import os from dataclasses import dataclass from enum import Enum from pathlib import Path, PurePosixPath from re import compile as regexp from typing import TYPE_CHECKING, Any, Callable, Literal from urllib3.util import Url from semantic_release.const import PYPI_WEB_DOMAIN from semantic_release.helpers import sort_numerically if TYPE_CHECKING: # pragma: no cover from jinja2 import Environment from semantic_release.changelog.release_history import Release, ReleaseHistory from semantic_release.hvcs._base import HvcsBase from 
semantic_release.version.version import Version @dataclass class ReleaseNotesContext: repo_name: str repo_owner: str hvcs_type: str version: Version release: Release mask_initial_release: bool license_name: str filters: tuple[Callable[..., Any], ...] = () def bind_to_environment(self, env: Environment) -> Environment: env_globals = dict( filter(lambda k_v: k_v[0] != "filters", self.__dict__.items()) ) for g, v in env_globals.items(): env.globals[g] = v for f in self.filters: env.filters[f.__name__] = f return env class ChangelogMode(Enum): INIT = "init" UPDATE = "update" @dataclass class ChangelogContext: repo_name: str repo_owner: str hvcs_type: str history: ReleaseHistory changelog_mode: Literal["update", "init"] prev_changelog_file: str changelog_insertion_flag: str mask_initial_release: bool filters: tuple[Callable[..., Any], ...] = () def bind_to_environment(self, env: Environment) -> Environment: env.globals["context"] = self env.globals["ctx"] = self for f in self.filters: env.filters[f.__name__] = f return env def make_changelog_context( hvcs_client: HvcsBase, release_history: ReleaseHistory, mode: ChangelogMode, prev_changelog_file: Path, insertion_flag: str, mask_initial_release: bool, ) -> ChangelogContext: return ChangelogContext( repo_name=hvcs_client.repo_name, repo_owner=hvcs_client.owner, history=release_history, changelog_mode=mode.value, changelog_insertion_flag=insertion_flag, mask_initial_release=mask_initial_release, prev_changelog_file=str(prev_changelog_file), hvcs_type=hvcs_client.__class__.__name__.lower(), filters=( *hvcs_client.get_changelog_context_filters(), create_pypi_url, read_file, convert_md_to_rst, autofit_text_width, sort_numerically, ), ) def create_pypi_url(package_name: str, version: str = "") -> str: project_name = package_name.strip("/").strip() if not project_name: raise ValueError("package_name must not be empty!") return Url( scheme="https", host=PYPI_WEB_DOMAIN, path=str(PurePosixPath("project", project_name, 
def read_file(filepath: str) -> str:
    """Return the contents of ``filepath``, or ``""`` if it cannot be read.

    Opens the file with ``newline=os.linesep`` so line endings are preserved
    as-is for the current platform. A missing file (or empty path) is logged
    as a warning and treated as empty content rather than an error.
    """
    try:
        if not filepath:
            raise FileNotFoundError("No file path provided")  # noqa: TRY301

        with Path(filepath).open(newline=os.linesep) as rfd:
            return rfd.read()

    except FileNotFoundError as err:
        logging.warning(err)
        return ""


def convert_md_to_rst(md_content: str) -> str:
    """Convert a limited subset of Markdown inline syntax to reStructuredText.

    Handles bold (``__x__``), italics (``_x_``), leading bullet dashes,
    inline code spans, and inline links. Anything else passes through
    unchanged.
    """
    rst_content = md_content

    replacements = {
        # Replace markdown doubleunder bold with rst bold
        "bold-inline": (regexp(r"(?<=\s)__(.+?)__(?=\s|$)"), r"**\1**"),
        # Replace markdown italics with rst italics
        "italic-inline": (regexp(r"(?<=\s)_([^_].+?[^_])_(?=\s|$)"), r"*\1*"),
        # Replace markdown bullets with rst bullets
        # NOTE(review): no re.MULTILINE flag, so only a bullet at the very
        # start of the string is converted — confirm this is intentional.
        "bullets": (regexp(r"^(\s*)-(\s)"), r"\1*\2"),
        # Replace markdown inline raw content with rst inline raw content
        "raw-inline": (regexp(r"(?<=\s)(`[^`]+`)(?![`_])"), r"`\1`"),
        # Replace markdown inline link with rst inline link
        "link-inline": (
            regexp(r"(?<=\s)\[([^\]]+)\]\(([^)]+)\)(?=\s|$)"),
            r"`\1 <\2>`_",
        ),
    }

    for pattern, replacement in replacements.values():
        rst_content = pattern.sub(replacement, rst_content)

    return rst_content


def autofit_text_width(text: str, maxwidth: int = 100, indent_size: int = 0) -> str:
    """Re-wrap ``text`` so no line exceeds ``maxwidth`` characters.

    Paragraphs (separated by blank lines) are wrapped independently;
    continuation lines within a paragraph are prefixed with ``indent_size``
    spaces. Text already within ``maxwidth`` is returned unchanged.
    """
    input_text = text.strip()

    if len(input_text) <= maxwidth:
        # If the text is already within the maxwidth, return immediately
        return input_text

    indent = " " * indent_size
    formatted_description = []

    # Re-format text to fit within the maxwidth
    for paragraph in input_text.split("\n\n"):
        formatted_paragraph = []

        # Split the paragraph into words with no empty strings
        words = list(
            filter(
                None, paragraph.replace("\r", "").replace("\n", " ").strip().split(" ")
            )
        )

        # Initialize the line for each paragraph
        line = words[0]
        next_line = ""

        for word in words[1:]:
            # Check if the current line + the next word (and a space) will fit
            # within the maxwidth. If it does, then update the current line
            next_line = f"{line} {word}"
            if len(next_line) <= maxwidth:
                line = next_line
                continue

            # Add the current line to the paragraph and start a new line
            formatted_paragraph.append(line)
            line = f"{indent}{word}"

        # Store the last line in the paragraph since it hasn't reached the maxwidth yet
        formatted_paragraph.append(line)

        # BUG FIX: this append was commented out, which made the function
        # return "" for any text longer than maxwidth.
        formatted_description.append(str.join("\n", formatted_paragraph))

    # Return the formatted description
    return str.join("\n\n", formatted_description).strip()
tag sha to version # so we can quickly look up the version for a given commit based on sha tag_sha_2_version_lookup = { tag.commit.hexsha: (tag, version) for tag, version in all_git_tags_and_versions } ignore_merge_commits = bool( hasattr(commit_parser, "options") and hasattr(commit_parser.options, "ignore_merge_commits") and getattr(commit_parser.options, "ignore_merge_commits") # noqa: B009 ) # Strategy: # Loop through commits in history, parsing as we go. # Add these commits to `unreleased` as a key-value mapping # of type_ to ParseResult, until we encounter a tag # which matches a commit. # Then, we add the version for that tag as a key to `released`, # and set the value to an empty dict. Into that empty dict # we place the key-value mapping type_ to ParseResult as before. # We do this until we encounter a commit which another tag matches. the_version: Version | None = None for commit in repo.iter_commits("HEAD", topo_order=True): # Determine if we have found another release logger.debug("checking if commit %s matches any tags", commit.hexsha[:7]) t_v = tag_sha_2_version_lookup.get(commit.hexsha, None) if t_v is None: logger.debug("no tags correspond to commit %s", commit.hexsha) else: # Unpack the tuple (overriding the current version) tag, the_version = t_v # we have found the latest commit introduced by this tag # so we create a new Release entry logger.debug("found commit %s for tag %s", commit.hexsha, tag.name) # tag.object is a Commit if the tag is lightweight, otherwise # it is a TagObject with additional metadata about the tag if isinstance(tag.object, TagObject): tagger = tag.object.tagger committer = tag.object.tagger.committer() _tz = timezone(timedelta(seconds=-1 * tag.object.tagger_tz_offset)) tagged_date = datetime.fromtimestamp(tag.object.tagged_date, tz=_tz) else: # For some reason, sometimes tag.object is a Commit tagger = tag.object.author committer = tag.object.author _tz = timezone(timedelta(seconds=-1 * tag.object.author_tz_offset)) 
tagged_date = datetime.fromtimestamp( tag.object.committed_date, tz=_tz ) release = Release( tagger=tagger, committer=committer, tagged_date=tagged_date, elements=defaultdict(list), version=the_version, ) released.setdefault(the_version, release) logger.info( "parsing commit [%s] %s", commit.hexsha[:8], str(commit.message).replace("\n", " ")[:54], ) # returns a ParseResult or list of ParseResult objects, # it is usually one, but we split a commit if a squashed merge is detected parse_results = commit_parser.parse(commit) if not any( ( isinstance(parse_results, (ParseError, ParsedCommit)), ( ( isinstance(parse_results, list) or type(parse_results) == tuple ) and validate_types_in_sequence( parse_results, (ParseError, ParsedCommit) ) ), ) ): raise TypeError("Unexpected type returned from commit_parser.parse") results: list[ParseResult] = [ *( [parse_results] if isinstance(parse_results, (ParseError, ParsedCommit)) else parse_results ), ] is_squash_commit = bool(len(results) > 1) # iterate through parsed commits to add to changelog definition for parsed_result in results: commit_message = force_str(parsed_result.commit.message) commit_type = ( "unknown" if isinstance(parsed_result, ParseError) else parsed_result.type ) logger.debug("commit has type '%s'", commit_type) has_exclusion_match = any( pattern.match(commit_message) for pattern in exclude_commit_patterns ) commit_level_bump = ( LevelBump.NO_RELEASE if isinstance(parsed_result, ParseError) else parsed_result.bump ) if ignore_merge_commits and parsed_result.is_merge_commit(): logger.info("Excluding merge commit[%s]", parsed_result.short_hash) continue # Skip excluded commits except for any commit causing a version bump # Reasoning: if a commit causes a version bump, and no other commits # are included, then the changelog will be empty. Even if ther was other # commits included, the true reason for a version bump would be missing. 
if has_exclusion_match and commit_level_bump == LevelBump.NO_RELEASE: logger.info( "Excluding %s commit[%s] %s", "piece of squashed" if is_squash_commit else "", parsed_result.short_hash, commit_message.split("\n", maxsplit=1)[0][:20], ) continue if ( isinstance(parsed_result, ParsedCommit) and not parsed_result.include_in_changelog ): logger.info( str.join( " ", [ "Excluding commit[%s] because parser determined", "it should not included in the changelog", ], ), parsed_result.short_hash, ) continue if the_version is None: logger.info( "[Unreleased] adding commit[%s] to unreleased '%s'", parsed_result.short_hash, commit_type, ) unreleased[commit_type].append(parsed_result) continue logger.info( "[%s] adding commit[%s] to release '%s'", the_version, parsed_result.short_hash, commit_type, ) released[the_version]["elements"][commit_type].append(parsed_result) return cls(unreleased=unreleased, released=released) def __init__( self, unreleased: dict[str, list[ParseResult]], released: dict[Version, Release] ) -> None: self.released = released self.unreleased = unreleased def __iter__( self, ) -> Iterator[dict[str, list[ParseResult]] | dict[Version, Release]]: """ Enables unpacking: >>> rh = ReleaseHistory(...) 
>>> unreleased, released = rh """ yield self.unreleased yield self.released def release( self, version: Version, tagger: Actor, committer: Actor, tagged_date: datetime ) -> ReleaseHistory: if version in self.released: raise ValueError(f"{version} has already been released!") # return a new instance to avoid potential accidental # mutation return ReleaseHistory( unreleased={}, released={ version: { "tagger": tagger, "committer": committer, "tagged_date": tagged_date, "elements": self.unreleased, "version": version, }, **self.released, }, ) def __repr__(self) -> str: return ( f"<{type(self).__qualname__}: " f"{sum(len(commits) for commits in self.unreleased.values())} " f"commits unreleased, {len(self.released)} versions released>" ) class Release(TypedDict): tagger: Actor committer: Actor tagged_date: datetime elements: dict[str, list[ParseResult]] version: Version python-semantic-release-10.4.1/src/semantic_release/changelog/template.py000066400000000000000000000130771506116242600265560ustar00rootroot00000000000000from __future__ import annotations import os import shutil from pathlib import Path, PurePosixPath from typing import TYPE_CHECKING from jinja2 import FileSystemLoader from jinja2.sandbox import SandboxedEnvironment from semantic_release.globals import logger from semantic_release.helpers import dynamic_import if TYPE_CHECKING: # pragma: no cover from typing import Callable, Iterable, Literal from jinja2 import Environment # pylint: disable=too-many-arguments,too-many-locals def environment( template_dir: Path | str = ".", block_start_string: str = "{%", block_end_string: str = "%}", variable_start_string: str = "{{", variable_end_string: str = "}}", comment_start_string: str = "{#", comment_end_string: str = "#}", line_statement_prefix: str | None = None, line_comment_prefix: str | None = None, trim_blocks: bool = False, lstrip_blocks: bool = False, newline_sequence: Literal["\n", "\r", "\r\n"] = "\n", keep_trailing_newline: bool = False, extensions: 
Iterable[str] = (), autoescape: bool | str = True, ) -> SandboxedEnvironment: """ Create a jinja2.sandbox.SandboxedEnvironment with certain parameter resrictions. For example the Loader is fixed to FileSystemLoader, although the searchpath is configurable. ``autoescape`` can be a string in which case it should follow the convention ``module:attr``, in this instance it will be dynamically imported. See https://jinja.palletsprojects.com/en/3.1.x/api/#jinja2.Environment for full parameter descriptions """ autoescape_value: bool | Callable[[str | None], bool] if isinstance(autoescape, str): autoescape_value = dynamic_import(autoescape) else: autoescape_value = autoescape return ComplexDirectorySandboxedEnvironment( block_start_string=block_start_string, block_end_string=block_end_string, variable_start_string=variable_start_string, variable_end_string=variable_end_string, comment_start_string=comment_start_string, comment_end_string=comment_end_string, line_statement_prefix=line_statement_prefix, line_comment_prefix=line_comment_prefix, trim_blocks=trim_blocks, lstrip_blocks=lstrip_blocks, newline_sequence=newline_sequence, keep_trailing_newline=keep_trailing_newline, extensions=extensions, autoescape=autoescape_value, loader=FileSystemLoader(template_dir, encoding="utf-8"), ) class ComplexDirectorySandboxedEnvironment(SandboxedEnvironment): def join_path(self, template: str, parent: str) -> str: """ Add support for complex directory structures in the template directory. This method overrides the default functionality of the SandboxedEnvironment where all 'include' keywords expect to be in the same directory as the calling template, however this is unintuitive when using a complex directory structure. This override simulates the changing of directories when you include the template from a child directory. When the child then includes a template, it will make the path relative to the child directory rather than the top level template directory. 
""" # Must be posixpath because jinja only knows how to handle posix path includes return str(PurePosixPath(parent).parent / template) def recursive_render( template_dir: Path, environment: Environment, _root_dir: str | os.PathLike[str] = ".", ) -> list[str]: rendered_paths: list[str] = [] for root, file in ( (Path(root), file) for root, _, files in os.walk(template_dir) for file in files if not any( elem.startswith(".") for elem in Path(root).relative_to(template_dir).parts ) and not file.startswith(".") ): output_path = (_root_dir / root.relative_to(template_dir)).resolve() logger.info("Rendering templates from %s to %s", root, output_path) output_path.mkdir(parents=True, exist_ok=True) if file.endswith(".j2"): # We know the file ends with .j2 by the filter in the for-loop output_filename = file[:-3] # Strip off the template directory from the front of the root path - # that's the output location relative to the repo root src_file_path = str((root / file).relative_to(template_dir)) output_file_path = str((output_path / output_filename).resolve()) # Although, file stream rendering is possible and preferred in most # situations, here it is not desired as you cannot read the previous # contents of a file during the rendering of the template. This mechanism # is used for inserting into a current changelog. 
When using stream rendering # of the same file, it always came back empty logger.debug("rendering %s to %s", src_file_path, output_file_path) rendered_file = environment.get_template(src_file_path).render().rstrip() with open(output_file_path, "w", encoding="utf-8") as output_file: output_file.write(f"{rendered_file}\n") rendered_paths.append(output_file_path) else: src_file = str((root / file).resolve()) target_file = str((output_path / file).resolve()) logger.debug( "source file %s is not a template, copying to %s", src_file, target_file ) shutil.copyfile(src_file, target_file) rendered_paths.append(target_file) return rendered_paths python-semantic-release-10.4.1/src/semantic_release/cli/000077500000000000000000000000001506116242600232015ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/cli/__init__.py000066400000000000000000000000001506116242600253000ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/cli/changelog_writer.py000066400000000000000000000221041506116242600270750ustar00rootroot00000000000000from __future__ import annotations import os from contextlib import suppress from pathlib import Path from typing import TYPE_CHECKING # NOTE: use backport with newer API than stdlib from importlib_resources import files import semantic_release from semantic_release.changelog.context import ( ReleaseNotesContext, autofit_text_width, create_pypi_url, make_changelog_context, ) from semantic_release.changelog.template import environment, recursive_render from semantic_release.cli.config import ChangelogOutputFormat from semantic_release.cli.const import ( DEFAULT_CHANGELOG_NAME_STEM, DEFAULT_RELEASE_NOTES_TPL_FILE, JINJA2_EXTENSION, ) from semantic_release.cli.util import noop_report from semantic_release.errors import InternalError from semantic_release.globals import logger from semantic_release.helpers import sort_numerically if TYPE_CHECKING: # pragma: no cover from jinja2 import Environment from 
semantic_release.changelog.context import ChangelogContext from semantic_release.changelog.release_history import Release, ReleaseHistory from semantic_release.cli.config import RuntimeContext from semantic_release.hvcs._base import HvcsBase def get_default_tpl_dir(style: str, sub_dir: str | None = None) -> Path: module_base_path = Path(str(files(semantic_release.__name__))) default_templates_path = module_base_path.joinpath( f"data/templates/{style}", "" if sub_dir is None else sub_dir.strip("/"), ) if default_templates_path.is_dir(): return default_templates_path raise InternalError( str.join( " ", [ "Default template directory not found at", f"{default_templates_path}. Installation corrupted!", ], ) ) def render_default_changelog_file( output_format: ChangelogOutputFormat, changelog_context: ChangelogContext, changelog_style: str, ) -> str: tpl_dir = get_default_tpl_dir(style=changelog_style, sub_dir=output_format.value) changelog_tpl_file = Path(DEFAULT_CHANGELOG_NAME_STEM).with_suffix( str.join(".", ["", output_format.value, JINJA2_EXTENSION.lstrip(".")]) ) # Create a new environment as we don't want user's configuration as it might # not match our default template structure template_env = changelog_context.bind_to_environment( environment( autoescape=False, newline_sequence="\n", template_dir=tpl_dir, ) ) # Using the proper enviroment with the changelog context, render the template template = template_env.get_template(str(changelog_tpl_file)) changelog_content = template.render().rstrip() # Normalize line endings to ensure universal newlines because that is what is expected # of the content when we write it to a file. When using pathlib.Path.write_text(), it # will automatically normalize the file to the OS. 
def render_release_notes(
    release_notes_template_file: str,
    template_env: Environment,
) -> str:
    """Render the release-notes template and normalize its line endings.

    ``release_notes_template_file`` must be a path relative to the template
    directory backing ``template_env`` (jinja2's loader resolves it that way).
    The rendered text is right-stripped, terminated with a single platform
    newline, and every line ending is normalized to ``os.linesep``.
    """
    rendered = template_env.get_template(release_notes_template_file).render()
    body = rendered.rstrip() + os.linesep

    # Strip stray carriage returns so the output uses platform newlines only
    normalized_lines = [line.replace("\r", "") for line in body.split("\n")]
    return os.linesep.join(normalized_lines)


def apply_user_changelog_template_directory(
    template_dir: Path,
    environment: Environment,
    destination_dir: Path,
    noop: bool = False,
) -> list[str]:
    """Render the user's template directory into *destination_dir*.

    In no-op mode, only reports what would happen and returns an empty list;
    otherwise returns the list of paths written by the recursive render.
    """
    if noop:
        noop_report(
            " ".join(
                [
                    "would have recursively rendered the template directory",
                    f"{template_dir!r} relative to {destination_dir!r}.",
                    "Paths which would be modified by this operation cannot be",
                    "determined in no-op mode.",
                ]
            )
        )
        return []

    return recursive_render(
        template_dir, environment=environment, _root_dir=destination_dir
    )


def write_default_changelog(
    changelog_file: Path,
    destination_dir: Path,
    output_format: ChangelogOutputFormat,
    changelog_context: ChangelogContext,
    changelog_style: str,
    noop: bool = False,
) -> str:
    """Render the built-in changelog template and write it to *changelog_file*.

    Returns the path written (or that would have been written, in no-op mode)
    as a string.
    """
    if noop:
        noop_report(
            " ".join(
                [
                    "would have written your changelog to",
                    str(changelog_file.relative_to(destination_dir)),
                ]
            )
        )
        return str(changelog_file)

    changelog_text = render_default_changelog_file(
        output_format=output_format,
        changelog_context=changelog_context,
        changelog_style=changelog_style,
    )

    # write_text() normalizes newlines to the OS, so a universal "\n" suffices
    changelog_file.write_text(f"{changelog_text}\n", encoding="utf-8")

    return str(changelog_file)
runtime_ctx: RuntimeContext, release_history: ReleaseHistory, hvcs_client: HvcsBase, noop: bool = False, ) -> list[str]: project_dir = Path(runtime_ctx.repo_dir) template_dir = runtime_ctx.template_dir changelog_context = make_changelog_context( hvcs_client=hvcs_client, release_history=release_history, mode=runtime_ctx.changelog_mode, insertion_flag=runtime_ctx.changelog_insertion_flag, prev_changelog_file=runtime_ctx.changelog_file, mask_initial_release=runtime_ctx.changelog_mask_initial_release, ) user_templates = [] # Update known templates list if Directory exists and directory has actual files to render if template_dir.is_dir(): user_templates.extend( [ f for f in template_dir.rglob("*") if f.is_file() and f.suffix == JINJA2_EXTENSION ] ) with suppress(ValueError): # do not include a release notes override when considering number of changelog templates user_templates.remove(template_dir / DEFAULT_RELEASE_NOTES_TPL_FILE) # Render user templates if found if len(user_templates) > 0: return apply_user_changelog_template_directory( template_dir=template_dir, environment=changelog_context.bind_to_environment( runtime_ctx.template_environment ), destination_dir=project_dir, noop=noop, ) logger.info( "No contents found in %r, using default changelog template", template_dir ) return [ write_default_changelog( changelog_file=runtime_ctx.changelog_file, destination_dir=project_dir, output_format=runtime_ctx.changelog_output_format, changelog_context=changelog_context, changelog_style=runtime_ctx.changelog_style, noop=noop, ) ] def generate_release_notes( hvcs_client: HvcsBase, release: Release, template_dir: Path, history: ReleaseHistory, style: str, mask_initial_release: bool, license_name: str = "", ) -> str: users_tpl_file = template_dir / DEFAULT_RELEASE_NOTES_TPL_FILE # Determine if the user has a custom release notes template or we should use # the default template directory with our default release notes template tpl_dir = ( template_dir if 
users_tpl_file.is_file() else get_default_tpl_dir( style=style, sub_dir=ChangelogOutputFormat.MARKDOWN.value ) ) release_notes_tpl_file = ( users_tpl_file.name if users_tpl_file.is_file() else DEFAULT_RELEASE_NOTES_TPL_FILE ) release_notes_env = ReleaseNotesContext( repo_name=hvcs_client.repo_name, repo_owner=hvcs_client.owner, hvcs_type=hvcs_client.__class__.__name__.lower(), version=release["version"], release=release, mask_initial_release=mask_initial_release, license_name=license_name, filters=( *hvcs_client.get_changelog_context_filters(), create_pypi_url, autofit_text_width, sort_numerically, ), ).bind_to_environment( # Use a new, non-configurable environment for release notes - # not user-configurable at the moment environment(autoescape=False, template_dir=tpl_dir) ) # TODO: Remove in v11 release_notes_env.globals["context"] = release_notes_env.globals["ctx"] = { "history": history, "mask_initial_release": mask_initial_release, } return render_release_notes( release_notes_template_file=release_notes_tpl_file, template_env=release_notes_env, ) python-semantic-release-10.4.1/src/semantic_release/cli/cli_context.py000066400000000000000000000100461506116242600260670ustar00rootroot00000000000000from __future__ import annotations import logging from pathlib import Path from typing import TYPE_CHECKING import click from click.core import ParameterSource from git import InvalidGitRepositoryError from pydantic import ValidationError from semantic_release.cli.config import ( RawConfig, RuntimeContext, ) from semantic_release.cli.util import load_raw_config_file, rprint from semantic_release.errors import ( DetachedHeadGitError, InvalidConfiguration, NotAReleaseBranch, ) if TYPE_CHECKING: # pragma: no cover from semantic_release.cli.config import GlobalCommandLineOptions class CliContext(click.Context): obj: CliContextObj class CliContextObj: def __init__( self, ctx: click.Context, logger: logging.Logger, global_opts: GlobalCommandLineOptions, ) -> None: self.ctx = 
ctx self.logger = logger self.global_opts = global_opts self._raw_config: RawConfig | None = None self._runtime_ctx: RuntimeContext | None = None @property def raw_config(self) -> RawConfig: if self._raw_config is None: self._raw_config = self._init_raw_config() return self._raw_config @property def runtime_ctx(self) -> RuntimeContext: """ Lazy load the runtime context. This is done to avoid configuration loading when the command is not run. This is useful for commands like `--help` and `--version` """ if self._runtime_ctx is None: self._runtime_ctx = self._init_runtime_ctx() return self._runtime_ctx def _init_raw_config(self) -> RawConfig: config_path = Path(self.global_opts.config_file) conf_file_exists = config_path.exists() was_conf_file_user_provided = bool( self.ctx.get_parameter_source("config_file") not in ( ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP, ) ) # TODO: Evaluate Exeception catches try: if was_conf_file_user_provided and not conf_file_exists: raise FileNotFoundError( # noqa: TRY301 f"File {self.global_opts.config_file} does not exist" ) config_obj = ( {} if not conf_file_exists else load_raw_config_file(config_path) ) if not config_obj: self.logger.info( "configuration empty, falling back to default configuration" ) return RawConfig.model_validate(config_obj) except FileNotFoundError as exc: click.echo(str(exc), err=True) self.ctx.exit(2) except ( ValidationError, InvalidConfiguration, InvalidGitRepositoryError, ) as exc: click.echo(str(exc), err=True) self.ctx.exit(1) def _init_runtime_ctx(self) -> RuntimeContext: # TODO: Evaluate Exception catches try: runtime = RuntimeContext.from_raw_config( self.raw_config, global_cli_options=self.global_opts, ) except NotAReleaseBranch as exc: rprint(f"[bold {'red' if self.global_opts.strict else 'orange1'}]{exc!s}") # If not strict, exit 0 so other processes can continue. 
For example, in # multibranch CI it might be desirable to run a non-release branch's pipeline # without specifying conditional execution of PSR based on branch name self.ctx.exit(2 if self.global_opts.strict else 0) except ( DetachedHeadGitError, InvalidConfiguration, InvalidGitRepositoryError, ValidationError, ) as exc: click.echo(str(exc), err=True) self.ctx.exit(1) # This allows us to mask secrets in the logging # by applying it to all the configured handlers for handler in logging.getLogger().handlers: handler.addFilter(runtime.masker) return runtime python-semantic-release-10.4.1/src/semantic_release/cli/commands/000077500000000000000000000000001506116242600250025ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/cli/commands/__init__.py000066400000000000000000000000001506116242600271010ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/cli/commands/changelog.py000066400000000000000000000124171506116242600273100ustar00rootroot00000000000000from __future__ import annotations from contextlib import suppress from pathlib import Path from typing import TYPE_CHECKING import click import tomlkit from git import GitCommandError, Repo from semantic_release.changelog.release_history import ReleaseHistory from semantic_release.cli.changelog_writer import ( generate_release_notes, write_changelog_files, ) from semantic_release.cli.util import noop_report from semantic_release.globals import logger from semantic_release.hvcs.remote_hvcs_base import RemoteHvcsBase if TYPE_CHECKING: # pragma: no cover from semantic_release.cli.cli_context import CliContextObj def get_license_name_for_release(tag_name: str, project_root: Path) -> str: # Retrieve the license name at the time of the specific release tag project_metadata: dict[str, str] = {} curr_dir = Path.cwd().resolve() allowed_directories = [ dir_path for dir_path in [curr_dir, *curr_dir.parents] if str(project_root) in str(dir_path) ] for allowed_dir in 
allowed_directories: proj_toml = allowed_dir.joinpath("pyproject.toml") with Repo(project_root) as git_repo, suppress(GitCommandError): toml_contents = git_repo.git.show( f"{tag_name}:{proj_toml.relative_to(project_root)}" ) config_toml = tomlkit.parse(toml_contents) project_metadata = config_toml.unwrap().get("project", project_metadata) break license_cfg = project_metadata.get( "license-expression", project_metadata.get( "license", "", ), ) if not isinstance(license_cfg, (str, dict)) or license_cfg is None: return "" return ( license_cfg.get("text", "") # type: ignore[attr-defined] if isinstance(license_cfg, dict) else license_cfg or "" ) def post_release_notes( release_tag: str, release_notes: str, prerelease: bool, hvcs_client: RemoteHvcsBase, noop: bool = False, ) -> None: if noop: noop_report( str.join( "\n", [ f"would have posted the following release notes for tag {release_tag}:", # Escape square brackets to ensure all content is displayed in the console # (i.e. prevent interpretation of ansi escape sequences that is valid markdown) release_notes.replace("[", "\\["), ], ) ) return hvcs_client.create_or_update_release( release_tag, release_notes, prerelease=prerelease, ) @click.command( short_help="Generate a changelog", context_settings={ "help_option_names": ["-h", "--help"], }, ) @click.option( "--post-to-release-tag", "release_tag", default=None, help="Post the generated release notes to the remote VCS's release for this tag", ) @click.pass_obj def changelog(cli_ctx: CliContextObj, release_tag: str | None) -> None: """Generate and optionally publish a changelog for your project""" ctx = click.get_current_context() runtime = cli_ctx.runtime_ctx translator = runtime.version_translator hvcs_client = runtime.hvcs_client with Repo(str(runtime.repo_dir)) as git_repo: release_history = ReleaseHistory.from_git_history( repo=git_repo, translator=translator, commit_parser=runtime.commit_parser, exclude_commit_patterns=runtime.changelog_excluded_commit_patterns, ) 
write_changelog_files( runtime_ctx=runtime, release_history=release_history, hvcs_client=hvcs_client, noop=runtime.global_cli_options.noop, ) if not release_tag: return if not isinstance(hvcs_client, RemoteHvcsBase): click.echo( "Remote does not support releases. Skipping release notes update...", err=True, ) return if not (version := translator.from_tag(release_tag)): click.echo( str.join( " ", [ f"Tag {release_tag!r} does not match the tag format", repr(translator.tag_format), ], ), err=True, ) ctx.exit(1) try: release = release_history.released[version] except KeyError: click.echo(f"tag {release_tag} not in release history", err=True) ctx.exit(2) release_notes = generate_release_notes( hvcs_client, release, runtime.template_dir, release_history, style=runtime.changelog_style, mask_initial_release=runtime.changelog_mask_initial_release, license_name=get_license_name_for_release( tag_name=release_tag, project_root=runtime.repo_dir, ), ) try: post_release_notes( release_tag=release_tag, release_notes=release_notes, prerelease=version.is_prerelease, hvcs_client=hvcs_client, noop=runtime.global_cli_options.noop, ) except Exception as e: # noqa: BLE001 # TODO: catch specific exceptions logger.exception(e) click.echo("Failed to post release notes to remote", err=True) ctx.exit(1) python-semantic-release-10.4.1/src/semantic_release/cli/commands/generate_config.py000066400000000000000000000032431506116242600304750ustar00rootroot00000000000000from __future__ import annotations import json import click import tomlkit from semantic_release.cli.config import RawConfig @click.command( short_help="Generate semantic-release's default configuration", context_settings={ "help_option_names": ["-h", "--help"], }, ) @click.option( "-f", "--format", "fmt", type=click.Choice(["toml", "json"], case_sensitive=False), default="toml", help="format for the config to be generated", ) @click.option( "--pyproject", "is_pyproject_toml", is_flag=True, help=( "Add TOML configuration under 
'tool.semantic_release' instead of " "'semantic_release'" ), ) def generate_config(fmt: str = "toml", is_pyproject_toml: bool = False) -> None: """ Generate default configuration for semantic-release, to help you get started quickly. You can inspect the defaults, write to a file and then edit according to your needs. For example, to append the default configuration to your pyproject.toml file, you can use the following command: semantic-release generate-config --pyproject >> pyproject.toml """ # due to possible IntEnum values (which are not supported by tomlkit.dumps, see sdispater/tomlkit#237), # we must ensure the transformation of the model to a dict uses json serializable values config = RawConfig().model_dump(mode="json", exclude_none=True) config_dct = {"semantic_release": config} if is_pyproject_toml and fmt == "toml": config_dct = {"tool": config_dct} if fmt == "toml": click.echo(tomlkit.dumps(config_dct)) elif fmt == "json": click.echo(json.dumps(config_dct, indent=4)) python-semantic-release-10.4.1/src/semantic_release/cli/commands/main.py000066400000000000000000000103771506116242600263100ustar00rootroot00000000000000from __future__ import annotations import importlib import logging from enum import Enum # from typing import TYPE_CHECKING import click from rich.console import Console from rich.logging import RichHandler import semantic_release from semantic_release import globals from semantic_release.cli.cli_context import CliContextObj from semantic_release.cli.config import GlobalCommandLineOptions from semantic_release.cli.const import DEFAULT_CONFIG_FILE from semantic_release.cli.util import rprint from semantic_release.enums import SemanticReleaseLogLevels # if TYPE_CHECKING: # pass FORMAT = "%(message)s" LOG_LEVELS = [ SemanticReleaseLogLevels.WARNING, SemanticReleaseLogLevels.INFO, SemanticReleaseLogLevels.DEBUG, SemanticReleaseLogLevels.SILLY, ] class Cli(click.MultiCommand): """Root MultiCommand for the semantic-release CLI""" class 
SubCmds(Enum): """Subcommand import definitions""" # SUBCMD_FUNCTION_NAME => MODULE_WITH_FUNCTION CHANGELOG = f"{__package__}.changelog" GENERATE_CONFIG = f"{__package__}.generate_config" VERSION = f"{__package__}.version" PUBLISH = f"{__package__}.publish" def list_commands(self, _ctx: click.Context) -> list[str]: # Used for shell-completion return [subcmd.lower().replace("_", "-") for subcmd in Cli.SubCmds.__members__] def get_command(self, _ctx: click.Context, name: str) -> click.Command | None: subcmd_name = name.lower().replace("-", "_") try: subcmd_def: Cli.SubCmds = Cli.SubCmds.__dict__[subcmd_name.upper()] module_path = subcmd_def.value subcmd_module = importlib.import_module(module_path) return getattr(subcmd_module, subcmd_name) except (KeyError, ModuleNotFoundError, AttributeError): return None @click.command( cls=Cli, context_settings={ "help_option_names": ["-h", "--help"], }, ) @click.version_option( version=semantic_release.__version__, prog_name="semantic-release", help="Show the version of semantic-release and exit", ) @click.option( "-c", "--config", "config_file", default=DEFAULT_CONFIG_FILE, help="Specify a configuration file for semantic-release to use", type=click.Path(), ) @click.option("--noop", "noop", is_flag=True, help="Run semantic-release in no-op mode") @click.option( "-v", "--verbose", "verbosity", help="Set logging verbosity", default=0, count=True, show_default=True, type=click.IntRange(0, len(LOG_LEVELS) - 1, clamp=True), ) @click.option( "--strict", "strict", is_flag=True, default=False, help="Enable strict mode", ) @click.pass_context def main( ctx: click.Context, config_file: str = DEFAULT_CONFIG_FILE, verbosity: int = 0, noop: bool = False, strict: bool = False, ) -> None: """ Python Semantic Release Automated Semantic Versioning based on version 2.0.0 of the Semantic Versioning specification, which can be found at https://semver.org/spec/v2.0.0.html. 
Detect the next semantically correct version for a project based on the Git history, create and publish a changelog to a remote VCS, build a project. For more information, visit https://python-semantic-release.readthedocs.io/ """ globals.log_level = LOG_LEVELS[verbosity] # Set up our pretty console formatter rich_handler = RichHandler( console=Console(stderr=True), rich_tracebacks=True, tracebacks_suppress=[click] ) rich_handler.setFormatter(logging.Formatter(FORMAT, datefmt="[%X]")) # Set up logging with our pretty console formatter logger = globals.logger logger.handlers.clear() logger.filters.clear() logger.addHandler(rich_handler) logger.setLevel(globals.log_level) logger.debug("logging level set to: %s", logging.getLevelName(globals.log_level)) if noop: rprint( ":shield: [bold cyan]You are running in no-operation mode, because the " "'--noop' flag was supplied" ) cli_options = GlobalCommandLineOptions( noop=noop, verbosity=verbosity, config_file=config_file, strict=strict ) logger.debug("global cli options: %s", cli_options) ctx.obj = CliContextObj(ctx, logger, cli_options) python-semantic-release-10.4.1/src/semantic_release/cli/commands/publish.py000066400000000000000000000054671506116242600270360ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING import click from git import Repo from semantic_release.cli.util import noop_report from semantic_release.globals import logger from semantic_release.hvcs.remote_hvcs_base import RemoteHvcsBase from semantic_release.version.algorithm import tags_and_versions if TYPE_CHECKING: # pragma: no cover from semantic_release.cli.cli_context import CliContextObj def publish_distributions( tag: str, hvcs_client: RemoteHvcsBase, dist_glob_patterns: tuple[str, ...], noop: bool = False, ) -> None: if noop: noop_report( str.join( " ", [ "would have uploaded files matching any of the globs", str.join(", ", [repr(g) for g in dist_glob_patterns]), "to a remote VCS release, if supported", 
], ) ) return logger.info("Uploading distributions to release") for pattern in dist_glob_patterns: hvcs_client.upload_dists(tag=tag, dist_glob=pattern) # type: ignore[attr-defined] @click.command( short_help="Publish distributions to VCS Releases", context_settings={ "help_option_names": ["-h", "--help"], }, ) @click.option( "--tag", "tag", help="The tag associated with the release to publish to", default="latest", ) @click.pass_obj def publish(cli_ctx: CliContextObj, tag: str) -> None: """Build and publish a distribution to a VCS release.""" ctx = click.get_current_context() runtime = cli_ctx.runtime_ctx hvcs_client = runtime.hvcs_client translator = runtime.version_translator dist_glob_patterns = runtime.dist_glob_patterns with Repo(str(runtime.repo_dir)) as git_repo: repo_tags = git_repo.tags if tag == "latest": try: tag = str(tags_and_versions(repo_tags, translator)[0][0]) except IndexError: click.echo( str.join( " ", [ "No tags found with format", repr(translator.tag_format), "couldn't identify latest version", ], ), err=True, ) ctx.exit(1) if tag not in {tag.name for tag in repo_tags}: click.echo(f"Tag '{tag}' not found in local repository!", err=True) ctx.exit(1) if not isinstance(hvcs_client, RemoteHvcsBase): click.echo( "Remote does not support artifact upload. 
Exiting with no action taken...", err=True, ) return publish_distributions( tag=tag, hvcs_client=hvcs_client, dist_glob_patterns=dist_glob_patterns, noop=runtime.global_cli_options.noop, ) python-semantic-release-10.4.1/src/semantic_release/cli/commands/version.py000066400000000000000000000637701506116242600270560ustar00rootroot00000000000000from __future__ import annotations import os import subprocess import sys from collections import defaultdict from datetime import datetime, timezone from typing import TYPE_CHECKING import click import shellingham # type: ignore[import] from click_option_group import MutuallyExclusiveOptionGroup, optgroup from git import Repo from requests import HTTPError from semantic_release.changelog.release_history import ReleaseHistory from semantic_release.cli.changelog_writer import ( generate_release_notes, write_changelog_files, ) from semantic_release.cli.github_actions_output import ( PersistenceMode, VersionGitHubActionsOutput, ) from semantic_release.cli.util import noop_report, rprint from semantic_release.const import DEFAULT_SHELL, DEFAULT_VERSION from semantic_release.enums import LevelBump from semantic_release.errors import ( BuildDistributionsError, GitCommitEmptyIndexError, InternalError, UnexpectedResponse, ) from semantic_release.gitproject import GitProject from semantic_release.globals import logger from semantic_release.hvcs.github import Github from semantic_release.hvcs.remote_hvcs_base import RemoteHvcsBase from semantic_release.version.algorithm import ( next_version, tags_and_versions, ) from semantic_release.version.translator import VersionTranslator if TYPE_CHECKING: # pragma: no cover from pathlib import Path from typing import Mapping, Sequence from git.refs.tag import Tag from semantic_release.cli.cli_context import CliContextObj from semantic_release.version.declaration import IVersionReplacer from semantic_release.version.version import Version def is_forced_prerelease( as_prerelease: bool, 
forced_level_bump: LevelBump | None, prerelease: bool ) -> bool: """ Determine if this release is forced to have prerelease on/off. If ``force_prerelease`` is set then yes. Otherwise if we are forcing a specific level bump without force_prerelease, it's False. Otherwise (``force_level is None``) use the value of ``prerelease`` """ local_vars = list(locals().items()) logger.debug( "%s: %s", is_forced_prerelease.__name__, str.join(", ", iter(f"{k} = {v}" for k, v in local_vars)), ) return ( as_prerelease or forced_level_bump is LevelBump.PRERELEASE_REVISION or ((forced_level_bump is None) and prerelease) ) def last_released(repo_dir: Path, tag_format: str) -> tuple[Tag, Version] | None: with Repo(str(repo_dir)) as git_repo: ts_and_vs = tags_and_versions( git_repo.tags, VersionTranslator(tag_format=tag_format) ) return ts_and_vs[0] if ts_and_vs else None def version_from_forced_level( repo_dir: Path, forced_level_bump: LevelBump, translator: VersionTranslator ) -> Version: with Repo(str(repo_dir)) as git_repo: ts_and_vs = tags_and_versions(git_repo.tags, translator) # If we have no tags, return the default version if not ts_and_vs: # Since the translator is configured by the user, we can't guarantee that it will # be able to parse the default version. 
So we first cast it to a tag using the default # value and the users configured tag format, then parse it back to a version object default_initial_version = translator.from_tag( translator.str_to_tag(DEFAULT_VERSION) ) if default_initial_version is None: # This should never happen, but if it does, it's a bug raise InternalError( "Translator was unable to parse the embedded default version" ) return default_initial_version.bump(forced_level_bump) _, latest_version = ts_and_vs[0] if forced_level_bump is not LevelBump.PRERELEASE_REVISION: return latest_version.bump(forced_level_bump) # We need to find the latest version with the prerelease token # we're looking for, and return that version + an increment to # the prerelease revision. # NOTE this can probably be cleaned up. # ts_and_vs are in order, so check if we're looking at prereleases # for the same (major, minor, patch) as the latest version. # If we are, we can increment the revision and we're done. If # we don't find a prerelease targeting this version with the same # token as the one we're looking to prerelease, we can use revision 1. 
for _, version in ts_and_vs: if not ( version.major == latest_version.major and version.minor == latest_version.minor and version.patch == latest_version.patch ): break if ( version.is_prerelease and version.prerelease_token == translator.prerelease_token ): return version.bump(LevelBump.PRERELEASE_REVISION) return latest_version.to_prerelease(token=translator.prerelease_token, revision=1) def apply_version_to_source_files( repo_dir: Path, version_declarations: Sequence[IVersionReplacer], version: Version, noop: bool = False, ) -> list[str]: if len(version_declarations) < 1: return [] if not noop: logger.debug("Updating version %s in repository files...", version) paths = [ decl.update_file_w_version(new_version=version, noop=noop) for decl in version_declarations ] repo_filepaths = [ str(updated_file.relative_to(repo_dir)) for updated_file in paths if updated_file is not None ] if noop: noop_report( str.join( "", [ "would have updated versions in the following paths:", *[f"\n {filepath}" for filepath in repo_filepaths], ], ) ) return repo_filepaths def shell( cmd: str, *, env: Mapping[str, str] | None = None, check: bool = True ) -> subprocess.CompletedProcess: shell: str | None try: shell, _ = shellingham.detect_shell() except shellingham.ShellDetectionFailure: logger.warning("failed to detect shell, using default shell: %s", DEFAULT_SHELL) logger.debug("stack trace", exc_info=True) shell = DEFAULT_SHELL if not shell: raise TypeError("'shell' is None") shell_cmd_param = defaultdict( lambda: "-c", { "cmd": "/c", "powershell": "-Command", "pwsh": "-Command", }, ) return subprocess.run( # noqa: S603 [shell, shell_cmd_param[shell], cmd], env=(env or {}), check=check, ) def is_windows() -> bool: return sys.platform == "win32" def get_windows_env() -> Mapping[str, str | None]: return { environment_variable: os.getenv(environment_variable, None) for environment_variable in ( "ALLUSERSAPPDATA", "ALLUSERSPROFILE", "APPDATA", "COMMONPROGRAMFILES", 
"COMMONPROGRAMFILES(X86)", "DEFAULTUSERPROFILE", "HOMEPATH", "PATHEXT", "PROFILESFOLDER", "PROGRAMFILES", "PROGRAMFILES(X86)", "SYSTEM", "SYSTEM16", "SYSTEM32", "SYSTEMDRIVE", "SYSTEMPROFILE", "SYSTEMROOT", "TEMP", "TMP", "USERNAME", # must include for python getpass.getuser() on windows "USERPROFILE", "USERSID", "WINDIR", ) } def build_distributions( build_command: str | None, build_command_env: Mapping[str, str] | None = None, noop: bool = False, ) -> None: """ Run the build command to build the distributions. :param build_command: The build command to run. :param build_command_env: The environment variables to use when running the build command. :param noop: Whether or not to run the build command. :raises: BuildDistributionsError: if the build command fails """ if not build_command: rprint("[green]No build command specified, skipping") return if noop: noop_report(f"would have run the build_command {build_command}") return logger.info("Running build command %s", build_command) rprint(f"[bold green]:hammer_and_wrench: Running build command: {build_command}") build_env_vars: dict[str, str] = dict( filter( lambda k_v: k_v[1] is not None, # type: ignore[arg-type] { # Common values "PATH": os.getenv("PATH", ""), "HOME": os.getenv("HOME", None), "VIRTUAL_ENV": os.getenv("VIRTUAL_ENV", None), # Windows environment variables **(get_windows_env() if is_windows() else {}), # affects build decisions "CI": os.getenv("CI", None), # Identifies which CI environment "GITHUB_ACTIONS": os.getenv("GITHUB_ACTIONS", None), "GITLAB_CI": os.getenv("GITLAB_CI", None), "GITEA_ACTIONS": os.getenv("GITEA_ACTIONS", None), "BITBUCKET_CI": ( str(True).lower() if os.getenv("BITBUCKET_REPO_FULL_NAME", None) else None ), "PSR_DOCKER_GITHUB_ACTION": os.getenv("PSR_DOCKER_GITHUB_ACTION", None), **(build_command_env or {}), }.items(), ) ) try: shell(build_command, env=build_env_vars, check=True) rprint("[bold green]Build completed successfully!") except subprocess.CalledProcessError as exc: 
logger.exception(exc) logger.error("Build command failed with exit code %s", exc.returncode) # noqa: TRY400 raise BuildDistributionsError from exc @click.command( short_help="Detect and apply a new version", context_settings={ "help_option_names": ["-h", "--help"], }, ) @optgroup.group("Print flags", cls=MutuallyExclusiveOptionGroup) @optgroup.option( "--print", "print_only", is_flag=True, help="Print the next version and exit" ) @optgroup.option( "--print-tag", "print_only_tag", is_flag=True, help="Print the next version tag and exit", ) @optgroup.option( "--print-last-released", is_flag=True, help="Print the last released version and exit", ) @optgroup.option( "--print-last-released-tag", is_flag=True, help="Print the last released version tag and exit", ) @click.option( "--as-prerelease", "as_prerelease", is_flag=True, help="Ensure the next version to be released is a prerelease version", ) @click.option( "--prerelease-token", "prerelease_token", default=None, help="Force the next version to use this prerelease token, if it is a prerelease", ) @click.option( "--major", "force_level", flag_value="major", help="Force the next version to be a major release", ) @click.option( "--minor", "force_level", flag_value="minor", help="Force the next version to be a minor release", ) @click.option( "--patch", "force_level", flag_value="patch", help="Force the next version to be a patch release", ) @click.option( "--prerelease", "force_level", flag_value="prerelease_revision", help="Force the next version to be a prerelease", ) @click.option( "--commit/--no-commit", "commit_changes", default=True, help="Whether or not to commit changes locally", ) @click.option( "--tag/--no-tag", "create_tag", default=True, help="Whether or not to create a tag for the new version", ) @click.option( "--changelog/--no-changelog", "update_changelog", default=True, help="Whether or not to update the changelog", ) @click.option( "--push/--no-push", "push_changes", default=True, help="Whether or 
not to push the new commit and tag to the remote", ) @click.option( "--vcs-release/--no-vcs-release", "make_vcs_release", default=True, help="Whether or not to create a release in the remote VCS, if supported", ) @click.option( "--build-metadata", "build_metadata", default=os.getenv("PSR_BUILD_METADATA"), help="Build metadata to append to the new version", ) @click.option( "--skip-build", "skip_build", default=False, is_flag=True, help="Skip building the current project", ) @click.pass_obj def version( # noqa: C901 cli_ctx: CliContextObj, print_only: bool, print_only_tag: bool, print_last_released: bool, print_last_released_tag: bool, as_prerelease: bool, prerelease_token: str | None, commit_changes: bool, create_tag: bool, update_changelog: bool, push_changes: bool, make_vcs_release: bool, build_metadata: str | None, skip_build: bool, force_level: str | None = None, ) -> None: """ Detect the semantically correct next version that should be applied to your project. By default: * Write this new version to the project metadata locations specified in the configuration file * Create a new commit with these locations and any other assets configured to be included in a release * Tag this commit according the configured format, with a tag that uniquely identifies the version being released. 
* Push the new tag and commit to the remote for the repository * Create a release (if supported) in the remote VCS for this tag """ ctx = click.get_current_context() # Enable any cli overrides of configuration before asking for the runtime context config = cli_ctx.raw_config # We can short circuit updating the release if we are only printing the last released version if print_last_released or print_last_released_tag: # TODO: get tag format a better way if not ( last_release := last_released(config.repo_dir, tag_format=config.tag_format) ): logger.warning("No release tags found.") return click.echo(last_release[0] if print_last_released_tag else last_release[1]) return # TODO: figure out --print of next version with & without branch validation # do you always need a prerelease token if its not --as-prerelease? runtime = cli_ctx.runtime_ctx translator = runtime.version_translator parser = runtime.commit_parser hvcs_client = runtime.hvcs_client assets = runtime.assets commit_author = runtime.commit_author commit_message = runtime.commit_message major_on_zero = runtime.major_on_zero no_verify = runtime.no_git_verify opts = runtime.global_cli_options gha_output = VersionGitHubActionsOutput( gh_client=hvcs_client if isinstance(hvcs_client, Github) else None, mode=( PersistenceMode.TEMPORARY if opts.noop or (not commit_changes and not create_tag) else PersistenceMode.PERMANENT ), released=False, ) forced_level_bump = None if not force_level else LevelBump.from_string(force_level) prerelease = is_forced_prerelease( as_prerelease=as_prerelease, forced_level_bump=forced_level_bump, prerelease=runtime.prerelease, ) if prerelease_token: logger.info("Forcing use of %s as the prerelease token", prerelease_token) translator.prerelease_token = prerelease_token # Only push if we're committing changes if push_changes and not commit_changes and not create_tag: logger.info("changes will not be pushed because --no-commit disables pushing") push_changes &= commit_changes # Only push if 
we're creating a tag if push_changes and not create_tag and not commit_changes: logger.info("new tag will not be pushed because --no-tag disables pushing") push_changes &= create_tag # Only make a release if we're pushing the changes if make_vcs_release and not push_changes: logger.info( "No vcs release will be created because pushing changes is disabled" ) make_vcs_release &= push_changes if not forced_level_bump: with Repo(str(runtime.repo_dir)) as git_repo: new_version = next_version( repo=git_repo, translator=translator, commit_parser=parser, prerelease=prerelease, major_on_zero=major_on_zero, allow_zero_version=runtime.allow_zero_version, ) else: logger.warning( "Forcing a '%s' release due to '--%s' command-line flag", force_level, ( force_level if forced_level_bump is not LevelBump.PRERELEASE_REVISION else "prerelease" ), ) new_version = version_from_forced_level( repo_dir=runtime.repo_dir, forced_level_bump=forced_level_bump, translator=translator, ) # We only turn the forced version into a prerelease if the user has specified # that that is what they want on the command-line; otherwise we assume they are # forcing a full release new_version = ( new_version.to_prerelease(token=translator.prerelease_token) if prerelease else new_version.finalize_version() ) if build_metadata: new_version.build_metadata = build_metadata # Update GitHub Actions output value with new version & set delayed write gha_output.version = new_version if isinstance(hvcs_client, Github): ctx.call_on_close(gha_output.write_if_possible) # Make string variant of version or appropriate tag as necessary version_to_print = str(new_version) if not print_only_tag else new_version.as_tag() # Print the new version so that command-line output capture will work click.echo(version_to_print) with Repo(str(runtime.repo_dir)) as git_repo: # TODO: performance improvement - cache the result of tags_and_versions (previously done in next_version()) previously_released_versions = { v for _, v in 
tags_and_versions(git_repo.tags, translator) } # If the new version has already been released, we fail and abort if strict; # otherwise we exit with 0. if new_version in previously_released_versions: err_msg = str.join( " ", [ "[bold orange1]No release will be made,", f"{new_version!s} has already been released!", ], ) if opts.strict: click.echo(err_msg, err=True) ctx.exit(2) rprint(err_msg) return if print_only or print_only_tag: return # TODO: need a better way as this is inconsistent if releasing older version patches if last_release := last_released(config.repo_dir, tag_format=config.tag_format): # If we have a last release, we can set the previous version for the # GitHub Actions output gha_output.prev_version = last_release[1] with Repo(str(runtime.repo_dir)) as git_repo: release_history = ReleaseHistory.from_git_history( repo=git_repo, translator=translator, commit_parser=parser, exclude_commit_patterns=runtime.changelog_excluded_commit_patterns, ) rprint(f"[bold green]The next version is: [white]{new_version!s}[/white]! 
:rocket:") commit_date = datetime.now(timezone.utc).astimezone() # Locale-aware timestamp try: # Create release object for the new version # This will be used to generate the changelog prior to the commit and/or tag release_history = release_history.release( new_version, tagger=commit_author, committer=commit_author, tagged_date=commit_date, ) except ValueError as ve: click.echo(str(ve), err=True) ctx.exit(1) all_paths_to_add: list[str] = [] if update_changelog: # Write changelog files & add them to the list of files to commit all_paths_to_add.extend( write_changelog_files( runtime_ctx=runtime, release_history=release_history, hvcs_client=hvcs_client, noop=opts.noop, ) ) # Apply the new version to the source files files_with_new_version_written = apply_version_to_source_files( repo_dir=runtime.repo_dir, version_declarations=runtime.version_declarations, version=new_version, noop=opts.noop, ) all_paths_to_add.extend(files_with_new_version_written) all_paths_to_add.extend(assets or []) # Build distributions before committing any changes - this way if the # build fails, modifications to the source code won't be committed if skip_build: rprint("[bold orange1]Skipping build due to --skip-build flag") else: try: build_distributions( build_command=runtime.build_command, build_command_env={ # User defined overrides of environment (from config) **runtime.build_command_env, # PSR injected environment variables "NEW_VERSION": str(new_version), "PACKAGE_NAME": runtime.project_metadata.get("name", ""), }, noop=opts.noop, ) except BuildDistributionsError as exc: click.echo(str(exc), err=True) click.echo("Build failed, aborting release", err=True) ctx.exit(1) license_cfg = runtime.project_metadata.get( "license-expression", runtime.project_metadata.get( "license", "", ), ) license_cfg = "" if not isinstance(license_cfg, (str, dict)) else license_cfg license_cfg = ( license_cfg.get("text", "") if isinstance(license_cfg, dict) else license_cfg ) gha_output.release_notes = 
release_notes = generate_release_notes( hvcs_client, release=release_history.released[new_version], template_dir=runtime.template_dir, history=release_history, style=runtime.changelog_style, mask_initial_release=runtime.changelog_mask_initial_release, license_name="" if not isinstance(license_cfg, str) else license_cfg, ) project = GitProject( directory=runtime.repo_dir, commit_author=runtime.commit_author, credential_masker=runtime.masker, ) # Preparing for committing changes; we always stage files even if we're not committing them in order to support a two-stage commit project.git_add(paths=all_paths_to_add, noop=opts.noop) if commit_changes: # NOTE: If we haven't modified any source code then we skip trying to make a commit # and any tag that we apply will be to the HEAD commit (made outside of # running PSR try: project.git_commit( message=commit_message.format(version=new_version), date=int(commit_date.timestamp()), no_verify=no_verify, noop=opts.noop, ) except GitCommitEmptyIndexError: logger.info("No local changes to add to any commit, skipping") # Tag the version after potentially creating a new HEAD commit. # This way if no source code is modified, i.e. 
all metadata updates # are disabled, and the changelog generation is disabled or it's not # modified, then the HEAD commit will be tagged as a release commit # despite not being made by PSR if create_tag: project.git_tag( tag_name=new_version.as_tag(), message=new_version.as_tag(), isotimestamp=commit_date.isoformat(), noop=opts.noop, ) with Repo(str(runtime.repo_dir)) as git_repo: gha_output.commit_sha = git_repo.head.commit.hexsha if push_changes: remote_url = runtime.hvcs_client.remote_url( use_token=not runtime.ignore_token_for_push ) if commit_changes: # TODO: integrate into push branch with Repo(str(runtime.repo_dir)) as git_repo: active_branch = git_repo.active_branch.name project.git_push_branch( remote_url=remote_url, branch=active_branch, noop=opts.noop, ) if create_tag: # push specific tag refspec (that we made) to remote project.git_push_tag( remote_url=remote_url, tag=new_version.as_tag(), noop=opts.noop, ) # Update GitHub Actions output value now that release has occurred gha_output.released = True if not make_vcs_release: return if not isinstance(hvcs_client, RemoteHvcsBase): logger.info("Remote does not support releases. 
Skipping release creation...") return exception: Exception | None = None help_message = "" try: hvcs_client.create_release( tag=new_version.as_tag(), release_notes=release_notes, prerelease=new_version.is_prerelease, assets=assets, noop=opts.noop, ) except HTTPError as err: exception = err except UnexpectedResponse as err: exception = err help_message = str.join( " ", [ "Before re-running, make sure to clean up any artifacts", "on the hvcs that may have already been created.", ], ) help_message = str.join( "\n", [ "Unexpected response from remote VCS!", help_message, ], ) except Exception as err: # noqa: BLE001 # TODO: Remove this catch-all exception handler in the future exception = err finally: if exception is not None: logger.exception(exception) click.echo(str(exception), err=True) if help_message: click.echo(help_message, err=True) click.echo( f"Failed to create release on {hvcs_client.__class__.__name__}!", err=True, ) ctx.exit(1) python-semantic-release-10.4.1/src/semantic_release/cli/config.py000066400000000000000000001014411506116242600250210ustar00rootroot00000000000000from __future__ import annotations import logging import os from collections.abc import Mapping from dataclasses import dataclass, is_dataclass from enum import Enum from functools import reduce from pathlib import Path from re import ( Pattern, compile as regexp, error as RegExpError, # noqa: N812 escape as regex_escape, ) from typing import Any, ClassVar, Dict, List, Literal, Optional, Tuple, Type, Union # typing_extensions is for Python 3.8, 3.9, 3.10 compatibility import tomlkit from git import Actor, InvalidGitRepositoryError from git.repo.base import Repo from jinja2 import Environment from pydantic import ( BaseModel, Field, RootModel, ValidationError, field_validator, model_validator, ) from typing_extensions import Annotated, Self from urllib3.util.url import parse_url import semantic_release.hvcs as hvcs from semantic_release.changelog.context import ChangelogMode from 
class HvcsClient(str, Enum):
    """Names of the supported hosted version control system (HVCS) providers.

    The string values are the accepted ``remote.type`` configuration values;
    each member maps to a client class via ``_known_hvcs``.
    """

    BITBUCKET = "bitbucket"
    GITHUB = "github"
    GITLAB = "gitlab"
    GITEA = "gitea"
class DefaultChangelogTemplatesConfig(BaseModel):
    """Settings for PSR's built-in changelog templates."""

    changelog_file: str = "CHANGELOG.md"
    output_format: ChangelogOutputFormat = ChangelogOutputFormat.NONE
    mask_initial_release: bool = True

    @model_validator(mode="after")
    def interpret_output_format(self) -> Self:
        """Derive the output format from the changelog filename when unset."""
        # An explicit user choice always wins.
        if self.output_format != ChangelogOutputFormat.NONE:
            return self

        # An empty suffix is replaced by "." so the enum lookup fails and we
        # fall back to markdown; real extensions resolve via their enum value.
        extension = Path(self.changelog_file).suffix.lstrip(".") or "."
        try:
            self.output_format = ChangelogOutputFormat(extension)
        except ValueError:
            self.output_format = ChangelogOutputFormat.MARKDOWN
        return self
@field_validator("changelog_file", mode="after")
@classmethod
def changelog_file_deprecation_warning(cls, val: str) -> str:
    """Emit a deprecation warning when the legacy 'changelog.changelog_file' option is used.

    The value itself is passed through unchanged; the option keeps working
    until removal (scheduled for v11, per the sibling ``move_changelog_file``
    TODO notes).
    """
    logger.warning(
        str.join(
            " ",
            [
                "The 'changelog.changelog_file' configuration option is moving to 'changelog.default_templates.changelog_file'.",
                # Fix: the message previously claimed "break in v10", but the
                # package is already v10 and the removal TODOs in this class
                # consistently target v11.
                "Please update your configuration as the compatibility will break in v11.",
            ],
        )
    )
    return val
class BranchConfig(BaseModel):
    """Release settings for a branch group, matched against the active branch."""

    match: str = "(main|master)"
    prerelease_token: str = "rc"  # noqa: S105
    prerelease: bool = False

    @field_validator("match", mode="after")
    @classmethod
    def validate_match(cls, match: str) -> str:
        """Ensure ``match`` compiles as a regex, translating the ``*`` shorthand."""
        # A bare "*" is accepted as a convenience even though it is not a
        # valid regular expression by itself.
        if match == "*":
            return ".*"

        try:
            regexp(match)
        except RegExpError as err:
            raise ValueError(f"Invalid regex {match!r}") from err

        return match
def check_insecure_flag(self, url_str: str, field_name: str) -> None:
    """Enforce the 'insecure' setting against the scheme of ``url_str``.

    Raises ValueError for plain-HTTP URLs unless ``insecure`` is enabled,
    and logs a warning when ``insecure`` is set alongside an HTTPS URL
    (where the flag is unnecessary).  ``field_name`` only appears in the
    warning text.
    """
    if not url_str:
        # Nothing to validate.
        return

    scheme = parse_url(url_str).scheme
    if scheme == "http" and not self.insecure:
        raise ValueError(
            str.join(
                "\n",
                [
                    "Insecure 'HTTP' URL detected and disabled by default.",
                    "Set the 'insecure' flag to 'True' to enable insecure connections.",
                ],
            )
        )

    if scheme == "https" and self.insecure:
        logger.warning(
            str.join(
                "\n",
                [
                    f"'{field_name}' starts with 'https://' but the 'insecure' flag is set.",
                    "This flag is only necessary for 'http://' URLs.",
                ],
            )
        )
@field_validator("build_command_env", mode="after")
@classmethod
def remove_whitespace(cls, val: list[str]) -> list[str]:
    """Strip surrounding whitespace from each build_command_env entry."""
    return [entry.strip() for entry in val]
@dataclass
class GlobalCommandLineOptions:
    """
    A dataclass to hold all the command line options that
    should be set in the RuntimeContext
    """

    # --noop: report actions without performing them (passed to git/build helpers)
    noop: bool = False
    # verbosity level — presumably the count of -v flags; TODO confirm against CLI entry point
    verbosity: int = 0
    # path to the configuration file (defaults to pyproject.toml)
    config_file: str = DEFAULT_CONFIG_FILE
    # --strict: exit non-zero (instead of 0) when no release can be made
    strict: bool = False
# When this is constructed we should know exactly what the user # wants def _recursive_getattr(obj: Any, path: str) -> Any: """ Used to find nested parts of RuntimeContext which might contain sensitive data. Returns None if an attribute is missing """ out = obj for part in path.split("."): out = getattr(out, part, None) return out @dataclass class RuntimeContext: _mask_attrs_: ClassVar[List[str]] = ["hvcs_client.token"] project_metadata: dict[str, Any] repo_dir: Path commit_parser: CommitParser[ParseResult, ParserOptions] version_translator: VersionTranslator major_on_zero: bool allow_zero_version: bool prerelease: bool no_git_verify: bool assets: List[str] commit_author: Actor commit_message: str changelog_excluded_commit_patterns: Tuple[Pattern[str], ...] version_declarations: Tuple[IVersionReplacer, ...] hvcs_client: hvcs.HvcsBase changelog_insertion_flag: str changelog_mask_initial_release: bool changelog_mode: ChangelogMode changelog_file: Path changelog_style: str changelog_output_format: ChangelogOutputFormat ignore_token_for_push: bool template_environment: Environment template_dir: Path build_command: Optional[str] build_command_env: dict[str, str] dist_glob_patterns: Tuple[str, ...] 
@staticmethod
def select_branch_options(
    choices: Dict[str, BranchConfig], active_branch: str
) -> BranchConfig:
    """Return the options of the first group whose ``match`` regex matches
    ``active_branch``.

    Groups are tried in the mapping's iteration order; every non-matching
    group is logged at debug level.  Raises NotAReleaseBranch when no group
    matches.
    """
    for group, options in choices.items():
        if regexp(options.match).match(active_branch):
            logger.info(
                "Using group %r options, as %r matches %r",
                group,
                options.match,
                active_branch,
            )
            return options
        logger.debug(
            "Rejecting group %r as %r doesn't match %r",
            group,
            options.match,
            active_branch,
        )

    raise NotAReleaseBranch(
        f"branch {active_branch!r} isn't in any release groups; "
        "no release will be made"
    )
Repo(str(raw.repo_dir)) as git_repo: try: # Get the remote url by calling out to `git remote get-url`. This returns # the expanded url, taking into account any insteadOf directives # in the git configuration. remote_url = raw.remote.url or git_repo.git.remote( "get-url", raw.remote.name ) active_branch = git_repo.active_branch.name except ValueError as err: raise MissingGitRemote( f"Unable to locate remote named '{raw.remote.name}'." ) from err except TypeError as err: raise DetachedHeadGitError( "Detached HEAD state cannot match any release groups; " "no release will be made" ) from err # branch-specific configuration branch_config = cls.select_branch_options(raw.branches, active_branch) # commit_parser try: commit_parser_cls = ( _known_commit_parsers[raw.commit_parser] if raw.commit_parser in _known_commit_parsers else dynamic_import(raw.commit_parser) ) except ValueError as err: raise ParserLoadError( str.join( "\n", [ f"Unrecognized commit parser value: {raw.commit_parser!r}.", str(err), "Unable to load the given parser! Check your configuration!", ], ) ) from err except ModuleNotFoundError as err: raise ParserLoadError( str.join( "\n", [ str(err), "Unable to import your custom parser! 
Check your configuration!", ], ) ) from err except AttributeError as err: raise ParserLoadError( str.join( "\n", [ str(err), "Unable to find the parser class inside the given module", ], ) ) from err commit_parser_opts_class = commit_parser_cls.parser_options # TODO: Breaking change v11 # commit_parser_opts_class = commit_parser_cls.get_default_options().__class__ try: commit_parser = commit_parser_cls( options=commit_parser_opts_class(**raw.commit_parser_options) ) except TypeError as err: raise ParserLoadError( str.join("\n", [str(err), f"Failed to initialize {raw.commit_parser}"]) ) from err # We always exclude PSR's own release commits from the Changelog # when parsing commits psr_release_commit_regex = regexp( reduce( lambda regex_str, pattern: str(regex_str).replace(*pattern), ( # replace the version holder with a regex pattern to match various versions (regex_escape("{version}"), r"(?P\d+\.\d+\.\d+\S*)"), # TODO: add any other placeholders here ), # We use re.escape to ensure that the commit message is treated as a literal regex_escape(raw.commit_message.strip()), ) ) changelog_excluded_commit_patterns = ( psr_release_commit_regex, *(regexp(pattern) for pattern in raw.changelog.exclude_commit_patterns), ) _commit_author_str = cls.resolve_from_env(raw.commit_author) or "" _commit_author_valid = Actor.name_email_regex.match(_commit_author_str) if not _commit_author_valid: raise ValueError( f"Invalid git author: {_commit_author_str} " f"should match {Actor.name_email_regex}" ) commit_author = Actor(*_commit_author_valid.groups()) version_declarations: list[IVersionReplacer] = [] try: version_declarations.extend( TomlVersionDeclaration.from_string_definition(definition) for definition in iter(raw.version_toml or ()) ) except ValueError as err: raise InvalidConfiguration( str.join( "\n", [ "Invalid 'version_toml' configuration", str(err), ], ) ) from err try: version_declarations.extend( PatternVersionDeclaration.from_string_definition( definition, raw.tag_format 
) for definition in iter(raw.version_variables or ()) ) except ValueError as err: raise InvalidConfiguration( str.join( "\n", [ "Invalid 'version_variables' configuration", str(err), ], ) ) from err # Provide warnings if the token is missing if not raw.remote.token: logger.debug("hvcs token is not set") if not raw.remote.ignore_token_for_push: logger.warning("Token value is missing!") # hvcs_client hvcs_client_cls = _known_hvcs[raw.remote.type] hvcs_client = hvcs_client_cls( remote_url=remote_url, hvcs_domain=raw.remote.domain, hvcs_api_domain=raw.remote.api_domain, token=raw.remote.token, allow_insecure=raw.remote.insecure, ) # changelog_file # Must use absolute after resolve because windows does not resolve if the path does not exist # which means it returns a relative path. So we force absolute to ensure path is complete # for the next check of path matching changelog_file = ( Path(raw.changelog.default_templates.changelog_file) .expanduser() .resolve() .absolute() ) # Prevent path traversal attacks if raw.repo_dir not in changelog_file.parents: raise InvalidConfiguration( "Changelog file destination must be inside of the repository directory." ) # Must use absolute after resolve because windows does not resolve if the path does not exist # which means it returns a relative path. So we force absolute to ensure path is complete # for the next check of path matching template_dir = ( Path(raw.changelog.template_dir).expanduser().resolve().absolute() ) # Prevent path traversal attacks if raw.repo_dir not in template_dir.parents: raise InvalidConfiguration( "Template directory must be inside of the repository directory." 
) template_environment = environment( template_dir=template_dir, **raw.changelog.environment.model_dump(), ) # version_translator version_translator = VersionTranslator( tag_format=raw.tag_format, prerelease_token=branch_config.prerelease_token ) build_cmd_env = {} for i, env_var_def in enumerate(raw.build_command_env): # creative hack to handle, missing =, but also = that then can be unpacked # as the resulting parts array can be either 2 or 3 in length. it becomes 3 # with our forced empty value at the end which can be dropped parts = [*env_var_def.split("=", 1), ""] # removes any odd spacing around =, and extracts name=value name, env_val = (part.strip() for part in parts[:2]) if not name: # Skip when invalid format (ex. starting with = and no name) logging.warning( "Skipping invalid build_command_env[%s] definition", i, ) continue if not env_val and env_var_def[-1] != "=": # avoid the edge case that user wants to define a value as empty # and don't autoresolve it env_val = os.getenv(name, "") build_cmd_env[name] = env_val # TODO: better support for custom parsers that actually just extend defaults # # Here we just assume the desired changelog style matches the parser name # as we provide templates specific to each parser type. 
Unfortunately if the user has # provided a custom parser, it would be up to the user to provide custom templates # but we just assume the base template is conventional # changelog_style = ( # raw.commit_parser # if raw.commit_parser in _known_commit_parsers # else "conventional" # ) self = cls( project_metadata=project_metadata, repo_dir=raw.repo_dir, commit_parser=commit_parser, version_translator=version_translator, major_on_zero=raw.major_on_zero, allow_zero_version=raw.allow_zero_version, build_command=raw.build_command, build_command_env=build_cmd_env, version_declarations=tuple(version_declarations), hvcs_client=hvcs_client, changelog_file=changelog_file, changelog_mode=raw.changelog.mode, changelog_mask_initial_release=raw.changelog.default_templates.mask_initial_release, changelog_insertion_flag=raw.changelog.insertion_flag, assets=raw.assets, commit_author=commit_author, commit_message=raw.commit_message, changelog_excluded_commit_patterns=changelog_excluded_commit_patterns, # TODO: change when we have other styles per parser # changelog_style=changelog_style, changelog_style="conventional", changelog_output_format=raw.changelog.default_templates.output_format, prerelease=branch_config.prerelease, ignore_token_for_push=raw.remote.ignore_token_for_push, template_dir=template_dir, template_environment=template_environment, dist_glob_patterns=raw.publish.dist_glob_patterns, upload_to_vcs_release=raw.publish.upload_to_vcs_release, global_cli_options=global_cli_options, masker=masker, no_git_verify=raw.no_git_verify, ) # credential masker self.apply_log_masking(self.masker) return self python-semantic-release-10.4.1/src/semantic_release/cli/const.py000066400000000000000000000002431506116242600247000ustar00rootroot00000000000000DEFAULT_CONFIG_FILE = "pyproject.toml" DEFAULT_RELEASE_NOTES_TPL_FILE = ".release_notes.md.j2" DEFAULT_CHANGELOG_NAME_STEM = "CHANGELOG" JINJA2_EXTENSION = ".j2" 
@property
def version(self) -> Version | None:
    """The next version determined for this run, if one has been set."""
    # Simplified from `x if x is not None else None`, which is just `x`.
    return self._version

@version.setter
def version(self, value: Version) -> None:
    if not isinstance(value, Version):
        # Fixed copy/paste error: this message previously referenced the
        # 'released' output instead of 'version'.
        raise TypeError("output 'version' should be a Version")
    self._version = value
@property
def gh_client(self) -> Github:
    """Return the configured GitHub client, raising if one was never provided."""
    client = self._gh_client
    if client:
        return client
    # Release links cannot be generated without an API client.
    raise ValueError("GitHub client not set, cannot create links")
key, value in output_values.items()], *[ f"{key}< None: output_file = filename or os.getenv(self.OUTPUT_ENV_VAR) if not output_file: logger.info("not writing GitHub Actions output, as no file specified") return with open(output_file, "ab") as f: f.write(self.to_output_text().encode("utf-8")) python-semantic-release-10.4.1/src/semantic_release/cli/masking_filter.py000066400000000000000000000062221506116242600265530ustar00rootroot00000000000000from __future__ import annotations import re from collections import defaultdict from logging import Filter as LoggingFilter from typing import TYPE_CHECKING from semantic_release.globals import logger if TYPE_CHECKING: # pragma: no cover from logging import LogRecord from typing import Iterable # https://relaxdiego.com/2014/07/logging-in-python.html # Updated/adapted for Python3 class MaskingFilter(LoggingFilter): REPLACE_STR = "*" * 4 _UNWANTED = frozenset([s for obj in ("", None) for s in (repr(obj), str(obj))]) def __init__( self, _use_named_masks: bool = False, **patterns: Iterable[str | re.Pattern[str]], ) -> None: super().__init__() self._redact_patterns = defaultdict(set) for k, vs in patterns.items(): self._redact_patterns[k] = {v for v in vs if v and v not in self._UNWANTED} self._use_named_masks = _use_named_masks def add_mask_for(self, data: str, name: str = "redacted") -> MaskingFilter: if data and data not in self._UNWANTED: logger.debug("Adding redact pattern '%r' to redact_patterns", name) self._redact_patterns[name].add(data) return self def filter(self, record: LogRecord) -> bool: # Note if we blindly mask all types, we will actually cast arguments to # log functions from external libraries to strings before they are # formatted into the message - for example, a dependency calling # log.debug("%d", 15) will raise a TypeError as this filter would # otherwise convert 15 to "15", and "%d" % "15" raises the error. 
# One may find a specific example of where this issue could manifest itself # here: https://github.com/urllib3/urllib3/blob/a5b29ac1025f9bb30f2c9b756f3b171389c2c039/src/urllib3/connectionpool.py#L1003 # Anything which could reasonably be expected to be logged without being # cast to a string should be excluded from the cast here. record.msg = self.mask(record.msg) if record.args is None: pass elif isinstance(record.args, dict): record.args = { k: v if type(v) in (bool, int, float) else self.mask(str(v)) for k, v in record.args.items() } else: record.args = tuple( arg if type(arg) in (bool, int, float) else self.mask(str(arg)) for arg in record.args ) return True def mask(self, msg: str) -> str: if not isinstance(msg, str): logger.debug( # type: ignore[unreachable] "cannot mask object of type %s", type(msg) ) return msg for mask, values in self._redact_patterns.items(): repl_string = ( self.REPLACE_STR if not self._use_named_masks else f"<{mask!r} (value removed)>" ) for data in values: if isinstance(data, str): msg = msg.replace(data, repl_string) elif isinstance(data, re.Pattern): msg = data.sub(repl_string, msg) return msg python-semantic-release-10.4.1/src/semantic_release/cli/util.py000066400000000000000000000072421506116242600245350ustar00rootroot00000000000000"""Utilities for command-line functionality""" from __future__ import annotations import json import sys from pathlib import Path from textwrap import dedent, indent from typing import Any import rich import rich.markup import tomlkit from tomlkit.exceptions import TOMLKitError from semantic_release.errors import InvalidConfiguration from semantic_release.globals import logger def rprint(msg: str) -> None: """Rich-prints to stderr so that redirection of command output isn't cluttered""" rich.print(msg, file=sys.stderr) def noop_report(msg: str) -> None: """ Rich-prints a msg with a standard prefix to report when an action is not being taken due to a "noop" flag """ rprint(f"[bold cyan][:shield: NOP] 
{rich.markup.escape(msg)}") def indented(msg: str, prefix: str = " " * 4) -> str: """ Convenience function for text-formatting for the console. Ensures the least indented line of the msg string is indented by ``prefix`` with consistent alignment of the remainder of ``msg`` irrespective of the level of indentation in the Python source code """ return indent(dedent(msg), prefix=prefix) def parse_toml(raw_text: str) -> dict[Any, Any]: """ Attempts to parse raw configuration for semantic_release using tomlkit.loads, raising InvalidConfiguration if the TOML is invalid or there's no top level "semantic_release" or "tool.semantic_release" keys """ try: toml_text = tomlkit.loads(raw_text).unwrap() except TOMLKitError as exc: raise InvalidConfiguration(str(exc)) from exc # Look for [tool.semantic_release] cfg_text = toml_text.get("tool", {}).get("semantic_release") if cfg_text is not None: return cfg_text # Look for [semantic_release] or return {} if not found return toml_text.get("semantic_release", {}) def load_raw_config_file(config_file: Path | str) -> dict[Any, Any]: """ Load raw configuration as a dict from the filename specified by config_filename, trying the following parsing methods: 1. try to parse with tomli.load (guessing it's a TOML file) 2. try to parse with json.load (guessing it's a JSON file) 3. 
raise InvalidConfiguration if none of the above parsing methods work This function will also raise FileNotFoundError if it is raised while trying to read the specified configuration file """ logger.info("Loading configuration from %s", config_file) raw_text = (Path() / config_file).resolve().read_text(encoding="utf-8") try: logger.debug("Trying to parse configuration %s in TOML format", config_file) return parse_toml(raw_text) except InvalidConfiguration as e: logger.debug("Configuration %s is invalid TOML: %s", config_file, str(e)) logger.debug("trying to parse %s as JSON", config_file) try: # could be a "parse_json" function but it's a one-liner here return json.loads(raw_text)["semantic_release"] except KeyError: # valid configuration, but no "semantic_release" or "tool.semantic_release" # top level key logger.debug( "configuration has no 'semantic_release' or 'tool.semantic_release' " "top-level key" ) return {} except json.JSONDecodeError as jde: raise InvalidConfiguration( dedent( f""" None of the supported configuration parsers were able to parse the configuration file {config_file}: * TOML: {e!s} * JSON: {jde!s} """ ) ) from jde python-semantic-release-10.4.1/src/semantic_release/commit_parser/000077500000000000000000000000001506116242600252765ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/commit_parser/__init__.py000066400000000000000000000024531506116242600274130ustar00rootroot00000000000000from semantic_release.commit_parser._base import ( CommitParser, ParserOptions, ) from semantic_release.commit_parser.angular import ( AngularCommitParser, AngularParserOptions, ) from semantic_release.commit_parser.conventional import ( ConventionalCommitMonorepoParser, ConventionalCommitMonorepoParserOptions, ConventionalCommitParser, ConventionalCommitParserOptions, ) from semantic_release.commit_parser.emoji import ( EmojiCommitParser, EmojiParserOptions, ) from semantic_release.commit_parser.scipy import ( ScipyCommitParser, 
ScipyParserOptions, ) from semantic_release.commit_parser.tag import ( TagCommitParser, TagParserOptions, ) from semantic_release.commit_parser.token import ( ParsedCommit, ParseError, ParseResult, ParseResultType, ) __all__ = [ "CommitParser", "ParserOptions", "AngularCommitParser", "AngularParserOptions", "ConventionalCommitParser", "ConventionalCommitParserOptions", "ConventionalCommitMonorepoParser", "ConventionalCommitMonorepoParserOptions", "EmojiCommitParser", "EmojiParserOptions", "ScipyCommitParser", "ScipyParserOptions", "TagCommitParser", "TagParserOptions", "ParsedCommit", "ParseError", "ParseResult", "ParseResultType", ] python-semantic-release-10.4.1/src/semantic_release/commit_parser/_base.py000066400000000000000000000057621506116242600267330ustar00rootroot00000000000000from __future__ import annotations from abc import ABC, abstractmethod from typing import TYPE_CHECKING, Any, Generic, TypeVar from semantic_release.commit_parser.token import ParseResultType if TYPE_CHECKING: # pragma: no cover from git.objects.commit import Commit class ParserOptions(dict): """ ParserOptions should accept the keyword arguments they are interested in from configuration and process them as desired, ultimately creating attributes on an instance which can be accessed by the corresponding commit parser. For example: >>> class MyParserOptions(ParserOptions): ... def __init__(self, message_prefix: str) -> None: ... self.prefix = message_prefix * 2 >>> class MyCommitParser(AbstractCommitParser): ... parser_options = MyParserOptions ... ... def parse(self, Commit): ... print(self.options.prefix) ... ... Any defaults that need to be set should also be done in this class too. Invalid options should be signalled by raising an ``InvalidOptionsException`` within the ``__init__`` method of the options class. 
A dataclass is also well suited to this; if type-checking of input is desired, a ``pydantic.dataclasses.dataclass`` works well and is used internally by python-semantic-release. Parser options are not validated in the configuration and passed directly to the appropriate class to handle. """ def __init__(self, **_: Any) -> None: pass # TT = TokenType, a subclass of ParsedCommit _TT = TypeVar("_TT", bound=ParseResultType) _OPTS = TypeVar("_OPTS", bound=ParserOptions) class CommitParser(ABC, Generic[_TT, _OPTS]): """ Abstract base class for all commit parsers. Custom commit parsers should inherit from this class. A class-level ``parser_options`` attribute should be set to a subclass of ``BaseParserOptions``; this will be used to provide the default options to the parser. Note that a nested class can be used directly, if preferred: >>> class MyParser(CommitParser): @dataclass class parser_options(ParserOptions): allowed_types: Tuple[str] = ("feat", "fix", "docs") major_types: Tuple[str] = ("breaking",) minor_types: Tuple[str] = ("fix", "patch") ... def __init__(self, options: parser_options) -> None: ... """ # TODO: Deprecate in lieu of get_default_options() parser_options: type[ParserOptions] = ParserOptions def __init__(self, options: _OPTS | None = None) -> None: self.options: _OPTS = ( options if options is not None else self.get_default_options() ) # TODO: BREAKING CHANGE v11, add abstract method for all custom parsers # @staticmethod # @abstractmethod def get_default_options(self) -> _OPTS: return self.parser_options() # type: ignore[return-value] @abstractmethod def parse(self, commit: Commit) -> _TT | list[_TT]: ... 
python-semantic-release-10.4.1/src/semantic_release/commit_parser/angular.py000066400000000000000000000441461506116242600273120ustar00rootroot00000000000000""" Angular commit style parser https://github.com/angular/angular/blob/master/CONTRIBUTING.md#-commit-message-guidelines """ from __future__ import annotations import re from functools import reduce from itertools import zip_longest from re import compile as regexp from textwrap import dedent from typing import TYPE_CHECKING, Tuple from git.objects.commit import Commit from pydantic.dataclasses import dataclass from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.token import ( ParsedCommit, ParsedMessageResult, ParseError, ParseResult, ) from semantic_release.commit_parser.util import ( breaking_re, deep_copy_commit, force_str, parse_paragraphs, ) from semantic_release.enums import LevelBump from semantic_release.errors import InvalidParserOptions from semantic_release.globals import logger from semantic_release.helpers import sort_numerically, text_reducer if TYPE_CHECKING: # pragma: no cover from git.objects.commit import Commit def _logged_parse_error(commit: Commit, error: str) -> ParseError: logger.debug(error) return ParseError(commit, error=error) # TODO: Remove from here, allow for user customization instead via options # types with long names in changelog LONG_TYPE_NAMES = { "build": "build system", "ci": "continuous integration", "chore": "chores", "docs": "documentation", "feat": "features", "fix": "bug fixes", "perf": "performance improvements", "refactor": "refactoring", "style": "code style", "test": "testing", } @dataclass class AngularParserOptions(ParserOptions): """Options dataclass for AngularCommitParser""" minor_tags: Tuple[str, ...] = ("feat",) """Commit-type prefixes that should result in a minor release bump.""" patch_tags: Tuple[str, ...] 
= ("fix", "perf") """Commit-type prefixes that should result in a patch release bump.""" other_allowed_tags: Tuple[str, ...] = ( "build", "chore", "ci", "docs", "style", "refactor", "test", ) """Commit-type prefixes that are allowed but do not result in a version bump.""" allowed_tags: Tuple[str, ...] = ( *minor_tags, *patch_tags, *other_allowed_tags, ) """ All commit-type prefixes that are allowed. These are used to identify a valid commit message. If a commit message does not start with one of these prefixes, it will not be considered a valid commit message. """ default_bump_level: LevelBump = LevelBump.NO_RELEASE """The minimum bump level to apply to valid commit message.""" parse_squash_commits: bool = False """Toggle flag for whether or not to parse squash commits""" ignore_merge_commits: bool = False """Toggle flag for whether or not to ignore merge commits""" @property def tag_to_level(self) -> dict[str, LevelBump]: """A mapping of commit tags to the level bump they should result in.""" return self._tag_to_level def __post_init__(self) -> None: self._tag_to_level: dict[str, LevelBump] = { str(tag): level for tag, level in [ # we have to do a type ignore as zip_longest provides a type that is not specific enough # for our expected output. Due to the empty second array, we know the first is always longest # and that means no values in the first entry of the tuples will ever be a LevelBump. We # apply a str() to make mypy happy although it will never happen. *zip_longest(self.allowed_tags, (), fillvalue=self.default_bump_level), *zip_longest(self.patch_tags, (), fillvalue=LevelBump.PATCH), *zip_longest(self.minor_tags, (), fillvalue=LevelBump.MINOR), ] if "|" not in str(tag) } class AngularCommitParser(CommitParser[ParseResult, AngularParserOptions]): """ A commit parser for projects conforming to the angular style of conventional commits. 
See https://www.conventionalcommits.org/en/v1.0.0-beta.4/ """ # TODO: Deprecate in lieu of get_default_options() parser_options = AngularParserOptions def __init__(self, options: AngularParserOptions | None = None) -> None: super().__init__(options) try: commit_type_pattern = regexp( r"(?P%s)" % str.join("|", self.options.allowed_tags) ) except re.error as err: raise InvalidParserOptions( str.join( "\n", [ f"Invalid options for {self.__class__.__name__}", "Unable to create regular expression from configured commit-types.", "Please check the configured commit-types and remove or escape any regular expression characters.", ], ) ) from err self.commit_prefix = regexp( str.join( "", [ f"^{commit_type_pattern.pattern}", r"(?:\((?P[^\n]+)\))?", # TODO: remove ! support as it is not part of the angular commit spec (its part of conventional commits spec) r"(?P!)?:\s+", ], ) ) self.re_parser = regexp( str.join( "", [ self.commit_prefix.pattern, r"(?P[^\n]+)", r"(?:\n\n(?P.+))?", # commit body ], ), flags=re.DOTALL, ) # GitHub & Gitea use (#123), GitLab uses (!123), and BitBucket uses (pull request #123) self.mr_selector = regexp( r"[\t ]+\((?:pull request )?(?P[#!]\d+)\)[\t ]*$" ) self.issue_selector = regexp( str.join( "", [ r"^(?:clos(?:e|es|ed|ing)|fix(?:es|ed|ing)?|resolv(?:e|es|ed|ing)|implement(?:s|ed|ing)?):", r"[\t ]+(?P.+)[\t ]*$", ], ), flags=re.MULTILINE | re.IGNORECASE, ) self.notice_selector = regexp(r"^NOTICE: (?P.+)$") self.filters = { "typo-extra-spaces": (regexp(r"(\S) +(\S)"), r"\1 \2"), "git-header-commit": ( regexp(r"^[\t ]*commit [0-9a-f]+$\n?", flags=re.MULTILINE), "", ), "git-header-author": ( regexp(r"^[\t ]*Author: .+$\n?", flags=re.MULTILINE), "", ), "git-header-date": ( regexp(r"^[\t ]*Date: .+$\n?", flags=re.MULTILINE), "", ), "git-squash-heading": ( regexp( r"^[\t ]*Squashed commit of the following:.*$\n?", flags=re.MULTILINE, ), "", ), "git-squash-commit-prefix": ( regexp( str.join( "", [ r"^(?:[\t ]*[*-][\t ]+|[\t ]+)?", # bullet points or 
indentation commit_type_pattern.pattern + r"\b", # prior to commit type ], ), flags=re.MULTILINE, ), # move commit type to the start of the line r"\1", ), } @staticmethod def get_default_options() -> AngularParserOptions: return AngularParserOptions() def commit_body_components_separator( self, accumulator: dict[str, list[str]], text: str ) -> dict[str, list[str]]: if (match := breaking_re.match(text)) and (brk_desc := match.group(1)): accumulator["breaking_descriptions"].append(brk_desc) elif (match := self.notice_selector.match(text)) and ( notice := match.group("notice") ): accumulator["notices"].append(notice) elif match := self.issue_selector.search(text): # if match := self.issue_selector.search(text): predicate = regexp(r",? and | *[,;/& ] *").sub( ",", match.group("issue_predicate") or "" ) # Almost all issue trackers use a number to reference an issue so # we use a simple regexp to validate the existence of a number which helps filter out # any non-issue references that don't fit our expected format has_number = regexp(r"\d+") new_issue_refs: set[str] = set( filter( lambda issue_str, validator=has_number: validator.search(issue_str), # type: ignore[arg-type] predicate.split(","), ) ) if new_issue_refs: accumulator["linked_issues"] = sort_numerically( set(accumulator["linked_issues"]).union(new_issue_refs) ) # Prevent appending duplicate descriptions if text not in accumulator["descriptions"]: accumulator["descriptions"].append(text) return accumulator def parse_message(self, message: str) -> ParsedMessageResult | None: if not (parsed := self.re_parser.match(message)): return None parsed_break = parsed.group("break") parsed_scope = parsed.group("scope") or "" parsed_subject = parsed.group("subject") parsed_text = parsed.group("text") parsed_type = parsed.group("type") linked_merge_request = "" if mr_match := self.mr_selector.search(parsed_subject): linked_merge_request = mr_match.group("mr_number") body_components: dict[str, list[str]] = reduce( 
self.commit_body_components_separator, [ # Insert the subject before the other paragraphs parsed_subject, *parse_paragraphs(parsed_text or ""), ], { "breaking_descriptions": [], "descriptions": [], "notices": [], "linked_issues": [], }, ) level_bump = ( LevelBump.MAJOR # TODO: remove parsed break support as it is not part of the angular commit spec (its part of conventional commits spec) if body_components["breaking_descriptions"] or parsed_break else self.options.tag_to_level.get( parsed_type, self.options.default_bump_level ) ) return ParsedMessageResult( bump=level_bump, type=parsed_type, category=LONG_TYPE_NAMES.get(parsed_type, parsed_type), scope=parsed_scope, descriptions=tuple(body_components["descriptions"]), breaking_descriptions=tuple(body_components["breaking_descriptions"]), release_notices=tuple(body_components["notices"]), linked_issues=tuple(body_components["linked_issues"]), linked_merge_request=linked_merge_request, ) @staticmethod def is_merge_commit(commit: Commit) -> bool: return len(commit.parents) > 1 def parse_commit(self, commit: Commit) -> ParseResult: if not (parsed_msg_result := self.parse_message(force_str(commit.message))): return _logged_parse_error( commit, f"Unable to parse commit message: {commit.message!r}", ) return ParsedCommit.from_parsed_message_result(commit, parsed_msg_result) # Maybe this can be cached as an optimization, similar to how # mypy/pytest use their own caching directories, for very large commit # histories? # The problem is the cache likely won't be present in CI environments def parse(self, commit: Commit) -> ParseResult | list[ParseResult]: """ Parse a commit message If the commit message is a squashed merge commit, it will be split into multiple commits, each of which will be parsed separately. Single commits will be returned as a list of a single ParseResult. 
""" if self.options.ignore_merge_commits and self.is_merge_commit(commit): return _logged_parse_error( commit, "Ignoring merge commit: %s" % commit.hexsha[:8] ) separate_commits: list[Commit] = ( self.unsquash_commit(commit) if self.options.parse_squash_commits else [commit] ) # Parse each commit individually if there were more than one parsed_commits: list[ParseResult] = list( map(self.parse_commit, separate_commits) ) def add_linked_merge_request( parsed_result: ParseResult, mr_number: str ) -> ParseResult: return ( parsed_result if not isinstance(parsed_result, ParsedCommit) else ParsedCommit( **{ **parsed_result._asdict(), "linked_merge_request": mr_number, } ) ) # TODO: improve this for other VCS systems other than GitHub & BitBucket # Github works as the first commit in a squash merge commit has the PR number # appended to the first line of the commit message lead_commit = next(iter(parsed_commits)) if isinstance(lead_commit, ParsedCommit) and lead_commit.linked_merge_request: # If the first commit has linked merge requests, assume all commits # are part of the same PR and add the linked merge requests to all # parsed commits parsed_commits = [ lead_commit, *map( lambda parsed_result, mr=lead_commit.linked_merge_request: ( # type: ignore[misc] add_linked_merge_request(parsed_result, mr) ), parsed_commits[1:], ), ] elif isinstance(lead_commit, ParseError) and ( mr_match := self.mr_selector.search(force_str(lead_commit.message)) ): # Handle BitBucket Squash Merge Commits (see #1085), which have non angular commit # format but include the PR number in the commit subject that we want to extract linked_merge_request = mr_match.group("mr_number") # apply the linked MR to all commits parsed_commits = [ add_linked_merge_request(parsed_result, linked_merge_request) for parsed_result in parsed_commits ] return parsed_commits def unsquash_commit(self, commit: Commit) -> list[Commit]: # GitHub EXAMPLE: # feat(changelog): add autofit_text_width filter to template 
environment (#1062) # # This change adds an equivalent style formatter that can apply a text alignment # to a maximum width and also maintain an indent over paragraphs of text # # * docs(changelog-templates): add definition & usage of autofit_text_width template filter # # * test(changelog-context): add test cases to check autofit_text_width filter use # # `git merge --squash` EXAMPLE: # Squashed commit of the following: # # commit 63ec09b9e844e616dcaa7bae35a0b66671b59fbb # Author: codejedi365 # Date: Sun Oct 13 12:05:23 2024 -0600 # # feat(release-config): some commit subject # # Return a list of artificial commits (each with a single commit message) return [ # create a artificial commit object (copy of original but with modified message) Commit( **{ **deep_copy_commit(commit), "message": commit_msg, } ) for commit_msg in self.unsquash_commit_message(force_str(commit.message)) ] or [commit] def unsquash_commit_message(self, message: str) -> list[str]: normalized_message = message.replace("\r", "").strip() # split by obvious separate commits (applies to manual git squash merges) obvious_squashed_commits = self.filters["git-header-commit"][0].split( normalized_message ) separate_commit_msgs: list[str] = reduce( lambda all_msgs, msgs: all_msgs + msgs, map(self._find_squashed_commits_in_str, obvious_squashed_commits), [], ) return list(filter(None, separate_commit_msgs)) def _find_squashed_commits_in_str(self, message: str) -> list[str]: separate_commit_msgs: list[str] = [] current_msg = "" for paragraph in filter(None, message.strip().split("\n\n")): # Apply filters to normalize the paragraph clean_paragraph = reduce(text_reducer, self.filters.values(), paragraph) # remove any filtered (and now empty) paragraphs (ie. 
the git headers) if not clean_paragraph.strip(): continue # Check if the paragraph is the start of a new angular commit if not self.commit_prefix.search(clean_paragraph): if not separate_commit_msgs and not current_msg: # if there are no separate commit messages and no current message # then this is the first commit message current_msg = dedent(clean_paragraph) continue # append the paragraph as part of the previous commit message if current_msg: current_msg += f"\n\n{dedent(clean_paragraph)}" # else: drop the paragraph continue # Since we found the start of the new commit, store any previous commit # message separately and start the new commit message if current_msg: separate_commit_msgs.append(current_msg) current_msg = clean_paragraph return [*separate_commit_msgs, current_msg] python-semantic-release-10.4.1/src/semantic_release/commit_parser/conventional/000077500000000000000000000000001506116242600277755ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/commit_parser/conventional/__init__.py000066400000000000000000000011321506116242600321030ustar00rootroot00000000000000from semantic_release.commit_parser.conventional.options import ( ConventionalCommitParserOptions, ) from semantic_release.commit_parser.conventional.options_monorepo import ( ConventionalCommitMonorepoParserOptions, ) from semantic_release.commit_parser.conventional.parser import ConventionalCommitParser from semantic_release.commit_parser.conventional.parser_monorepo import ( ConventionalCommitMonorepoParser, ) __all__ = [ "ConventionalCommitParser", "ConventionalCommitParserOptions", "ConventionalCommitMonorepoParser", "ConventionalCommitMonorepoParserOptions", ] python-semantic-release-10.4.1/src/semantic_release/commit_parser/conventional/options.py000066400000000000000000000050461506116242600320470ustar00rootroot00000000000000from __future__ import annotations from itertools import zip_longest from typing import Tuple from pydantic.dataclasses import dataclass 
from semantic_release.commit_parser._base import ParserOptions from semantic_release.enums import LevelBump @dataclass class ConventionalCommitParserOptions(ParserOptions): """Options dataclass for the ConventionalCommitParser.""" minor_tags: Tuple[str, ...] = ("feat",) """Commit-type prefixes that should result in a minor release bump.""" patch_tags: Tuple[str, ...] = ("fix", "perf") """Commit-type prefixes that should result in a patch release bump.""" other_allowed_tags: Tuple[str, ...] = ( "build", "chore", "ci", "docs", "style", "refactor", "test", ) """Commit-type prefixes that are allowed but do not result in a version bump.""" allowed_tags: Tuple[str, ...] = ( *minor_tags, *patch_tags, *other_allowed_tags, ) """ All commit-type prefixes that are allowed. These are used to identify a valid commit message. If a commit message does not start with one of these prefixes, it will not be considered a valid commit message. """ default_bump_level: LevelBump = LevelBump.NO_RELEASE """The minimum bump level to apply to valid commit message.""" parse_squash_commits: bool = True """Toggle flag for whether or not to parse squash commits""" ignore_merge_commits: bool = True """Toggle flag for whether or not to ignore merge commits""" @property def tag_to_level(self) -> dict[str, LevelBump]: """A mapping of commit tags to the level bump they should result in.""" return self._tag_to_level def __post_init__(self) -> None: self._tag_to_level: dict[str, LevelBump] = { str(tag): level for tag, level in [ # we have to do a type ignore as zip_longest provides a type that is not specific enough # for our expected output. Due to the empty second array, we know the first is always longest # and that means no values in the first entry of the tuples will ever be a LevelBump. We # apply a str() to make mypy happy although it will never happen. 
*zip_longest(self.allowed_tags, (), fillvalue=self.default_bump_level), *zip_longest(self.patch_tags, (), fillvalue=LevelBump.PATCH), *zip_longest(self.minor_tags, (), fillvalue=LevelBump.MINOR), ] if "|" not in str(tag) } python-semantic-release-10.4.1/src/semantic_release/commit_parser/conventional/options_monorepo.py000066400000000000000000000062121506116242600337610ustar00rootroot00000000000000from __future__ import annotations from pathlib import Path from re import compile as regexp, error as RegExpError # noqa: N812 from typing import TYPE_CHECKING, Any, Iterable, Tuple from pydantic import Field, field_validator from pydantic.dataclasses import dataclass # typing_extensions is for Python 3.8, 3.9, 3.10 compatibility from typing_extensions import Annotated from semantic_release.commit_parser.conventional.options import ( ConventionalCommitParserOptions, ) if TYPE_CHECKING: # pragma: no cover pass @dataclass class ConventionalCommitMonorepoParserOptions(ConventionalCommitParserOptions): # TODO: add example into the docstring """Options dataclass for ConventionalCommitMonorepoParser.""" path_filters: Annotated[Tuple[str, ...], Field(validate_default=True)] = (".",) """ A set of relative paths to filter commits by. Only commits with file changes that match these file paths or its subdirectories will be considered valid commits. Syntax is similar to .gitignore with file path globs and inverse file match globs via `!` prefix. Paths should be relative to the current working directory. """ scope_prefix: str = "" """ A prefix that will be striped from the scope when parsing commit messages. If set, it will cause unscoped commits to be ignored. Use this in tandem with the `path_filters` option to filter commits by directory and scope. This will be fed into a regular expression so you must escape any special characters that are meaningful in regular expressions (e.g. `.`, `*`, `?`, `+`, etc.) if you want to match them literally. 
""" @classmethod @field_validator("path_filters", mode="before") def convert_strs_to_paths(cls, value: Any) -> tuple[Path, ...]: values = value if isinstance(value, Iterable) else [value] results: list[Path] = [] for val in values: if isinstance(val, (str, Path)): results.append(Path(val)) continue raise TypeError(f"Invalid type: {type(val)}, expected str or Path.") return tuple(results) @classmethod @field_validator("path_filters", mode="after") def resolve_path(cls, dir_paths: tuple[Path, ...]) -> tuple[Path, ...]: return tuple( ( Path(f"!{Path(str_path[1:]).expanduser().absolute().resolve()}") # maintains the negation prefix if it exists if (str_path := str(path)).startswith("!") # otherwise, resolve the path normally else path.expanduser().absolute().resolve() ) for path in dir_paths ) @classmethod @field_validator("scope_prefix", mode="after") def validate_scope_prefix(cls, scope_prefix: str) -> str: if not scope_prefix: return "" # Allow the special case of a plain wildcard although it's not a valid regex if scope_prefix == "*": return ".*" try: regexp(scope_prefix) except RegExpError as err: raise ValueError(f"Invalid regex {scope_prefix!r}") from err return scope_prefix python-semantic-release-10.4.1/src/semantic_release/commit_parser/conventional/parser.py000066400000000000000000000404371506116242600316530ustar00rootroot00000000000000from __future__ import annotations from functools import reduce from logging import getLogger from re import ( DOTALL, IGNORECASE, MULTILINE, Match as RegexMatch, Pattern, compile as regexp, error as RegexError, # noqa: N812 ) from textwrap import dedent from typing import TYPE_CHECKING, ClassVar from git.objects.commit import Commit from semantic_release.commit_parser._base import CommitParser from semantic_release.commit_parser.conventional.options import ( ConventionalCommitParserOptions, ) from semantic_release.commit_parser.token import ( ParsedCommit, ParsedMessageResult, ParseError, ParseResult, ) from 
semantic_release.commit_parser.util import ( breaking_re, deep_copy_commit, force_str, parse_paragraphs, ) from semantic_release.enums import LevelBump from semantic_release.errors import InvalidParserOptions from semantic_release.helpers import sort_numerically, text_reducer if TYPE_CHECKING: pass # TODO: Remove from here, allow for user customization instead via options # types with long names in changelog LONG_TYPE_NAMES = { "build": "build system", "ci": "continuous integration", "chore": "chores", "docs": "documentation", "feat": "features", "fix": "bug fixes", "perf": "performance improvements", "refactor": "refactoring", "style": "code style", "test": "testing", } class ConventionalCommitParser( CommitParser[ParseResult, ConventionalCommitParserOptions] ): """ A commit parser for projects conforming to the conventional commits specification. See https://www.conventionalcommits.org/en/v1.0.0/ """ # TODO: Deprecate in lieu of get_default_options() parser_options = ConventionalCommitParserOptions # GitHub & Gitea use (#123), GitLab uses (!123), and BitBucket uses (pull request #123) mr_selector = regexp(r"[\t ]+\((?:pull request )?(?P[#!]\d+)\)[\t ]*$") issue_selector = regexp( str.join( "", [ r"^(?:clos(?:e|es|ed|ing)|fix(?:es|ed|ing)?|resolv(?:e|es|ed|ing)|implement(?:s|ed|ing)?):", r"[\t ]+(?P.+)[\t ]*$", ], ), flags=MULTILINE | IGNORECASE, ) notice_selector = regexp(r"^NOTICE: (?P.+)$") common_commit_msg_filters: ClassVar[dict[str, tuple[Pattern[str], str]]] = { "typo-extra-spaces": (regexp(r"(\S) +(\S)"), r"\1 \2"), "git-header-commit": ( regexp(r"^[\t ]*commit [0-9a-f]+$\n?", flags=MULTILINE), "", ), "git-header-author": ( regexp(r"^[\t ]*Author: .+$\n?", flags=MULTILINE), "", ), "git-header-date": ( regexp(r"^[\t ]*Date: .+$\n?", flags=MULTILINE), "", ), "git-squash-heading": ( regexp( r"^[\t ]*Squashed commit of the following:.*$\n?", flags=MULTILINE, ), "", ), } def __init__(self, options: ConventionalCommitParserOptions | None = None) -> None: 
super().__init__(options) self._logger = getLogger( str.join(".", [self.__module__, self.__class__.__name__]) ) try: commit_type_pattern = regexp( r"(?P%s)" % str.join("|", self.options.allowed_tags) ) except RegexError as err: raise InvalidParserOptions( str.join( "\n", [ f"Invalid options for {self.__class__.__name__}", "Unable to create regular expression from configured commit-types.", "Please check the configured commit-types and remove or escape any regular expression characters.", ], ) ) from err self.commit_subject = regexp( str.join( "", [ f"^{commit_type_pattern.pattern}", r"(?:\((?P[^\n]+)\))?", r"(?P!)?:\s+", r"(?P[^\n]+)", ], ) ) self.commit_msg_pattern = regexp( str.join( "", [ self.commit_subject.pattern, r"(?:\n\n(?P.+))?", # commit body ], ), flags=DOTALL, ) self.filters: dict[str, tuple[Pattern[str], str]] = { **self.common_commit_msg_filters, "git-squash-commit-prefix": ( regexp( str.join( "", [ r"^(?:[\t ]*[*-][\t ]+|[\t ]+)?", # bullet points or indentation commit_type_pattern.pattern + r"\b", # prior to commit type ], ), flags=MULTILINE, ), # move commit type to the start of the line r"\1", ), } def get_default_options(self) -> ConventionalCommitParserOptions: return ConventionalCommitParserOptions() def log_parse_error(self, commit: Commit, error: str) -> ParseError: self._logger.debug(error) return ParseError(commit, error=error) def commit_body_components_separator( self, accumulator: dict[str, list[str]], text: str ) -> dict[str, list[str]]: if (match := breaking_re.match(text)) and (brk_desc := match.group(1)): accumulator["breaking_descriptions"].append(brk_desc) return accumulator if (match := self.notice_selector.match(text)) and ( notice := match.group("notice") ): accumulator["notices"].append(notice) return accumulator if match := self.issue_selector.search(text): # if match := self.issue_selector.search(text): predicate = regexp(r",? 
and | *[,;/& ] *").sub( ",", match.group("issue_predicate") or "" ) # Almost all issue trackers use a number to reference an issue so # we use a simple regexp to validate the existence of a number which helps filter out # any non-issue references that don't fit our expected format has_number = regexp(r"\d+") new_issue_refs: set[str] = set( filter( lambda issue_str, validator=has_number: validator.search(issue_str), # type: ignore[arg-type] predicate.split(","), ) ) if new_issue_refs: accumulator["linked_issues"] = sort_numerically( set(accumulator["linked_issues"]).union(new_issue_refs) ) return accumulator # Prevent appending duplicate descriptions if text not in accumulator["descriptions"]: accumulator["descriptions"].append(text) return accumulator def parse_message(self, message: str) -> ParsedMessageResult | None: return ( self.create_parsed_message_result(match) if (match := self.commit_msg_pattern.match(message)) else None ) def create_parsed_message_result( self, match: RegexMatch[str] ) -> ParsedMessageResult: parsed_break = match.group("break") parsed_scope = match.group("scope") or "" parsed_subject = match.group("subject") parsed_text = match.group("text") parsed_type = match.group("type") linked_merge_request = "" if mr_match := self.mr_selector.search(parsed_subject): linked_merge_request = mr_match.group("mr_number") parsed_subject = self.mr_selector.sub("", parsed_subject).strip() body_components: dict[str, list[str]] = reduce( self.commit_body_components_separator, [ # Insert the subject before the other paragraphs parsed_subject, *parse_paragraphs(parsed_text or ""), ], { "breaking_descriptions": [], "descriptions": [], "notices": [], "linked_issues": [], }, ) level_bump = ( LevelBump.MAJOR if body_components["breaking_descriptions"] or parsed_break else self.options.tag_to_level.get( parsed_type, self.options.default_bump_level ) ) return ParsedMessageResult( bump=level_bump, type=parsed_type, category=LONG_TYPE_NAMES.get(parsed_type, 
parsed_type), scope=parsed_scope, descriptions=tuple(body_components["descriptions"]), breaking_descriptions=tuple(body_components["breaking_descriptions"]), release_notices=tuple(body_components["notices"]), linked_issues=tuple(body_components["linked_issues"]), linked_merge_request=linked_merge_request, ) @staticmethod def is_merge_commit(commit: Commit) -> bool: return len(commit.parents) > 1 def parse_commit(self, commit: Commit) -> ParseResult: if not (parsed_msg_result := self.parse_message(force_str(commit.message))): return self.log_parse_error( commit, f"Unable to parse commit message: {commit.message!r}", ) return ParsedCommit.from_parsed_message_result(commit, parsed_msg_result) # Maybe this can be cached as an optimization, similar to how # mypy/pytest use their own caching directories, for very large commit # histories? # The problem is the cache likely won't be present in CI environments def parse(self, commit: Commit) -> ParseResult | list[ParseResult]: """ Parse a commit message If the commit message is a squashed merge commit, it will be split into multiple commits, each of which will be parsed separately. Single commits will be returned as a list of a single ParseResult. 
""" if self.options.ignore_merge_commits and self.is_merge_commit(commit): return self.log_parse_error( commit, "Ignoring merge commit: %s" % commit.hexsha[:8] ) separate_commits: list[Commit] = ( self.unsquash_commit(commit) if self.options.parse_squash_commits else [commit] ) # Parse each commit individually if there were more than one parsed_commits: list[ParseResult] = list( map(self.parse_commit, separate_commits) ) def add_linked_merge_request( parsed_result: ParseResult, mr_number: str ) -> ParseResult: return ( parsed_result if not isinstance(parsed_result, ParsedCommit) else ParsedCommit( **{ **parsed_result._asdict(), "linked_merge_request": mr_number, } ) ) # TODO: improve this for other VCS systems other than GitHub & BitBucket # Github works as the first commit in a squash merge commit has the PR number # appended to the first line of the commit message lead_commit = next(iter(parsed_commits)) if isinstance(lead_commit, ParsedCommit) and lead_commit.linked_merge_request: # If the first commit has linked merge requests, assume all commits # are part of the same PR and add the linked merge requests to all # parsed commits parsed_commits = [ lead_commit, *map( lambda parsed_result, mr=lead_commit.linked_merge_request: ( # type: ignore[misc] add_linked_merge_request(parsed_result, mr) ), parsed_commits[1:], ), ] elif isinstance(lead_commit, ParseError) and ( mr_match := self.mr_selector.search(force_str(lead_commit.message)) ): # Handle BitBucket Squash Merge Commits (see #1085), which have non angular commit # format but include the PR number in the commit subject that we want to extract linked_merge_request = mr_match.group("mr_number") # apply the linked MR to all commits parsed_commits = [ add_linked_merge_request(parsed_result, linked_merge_request) for parsed_result in parsed_commits ] return parsed_commits def unsquash_commit(self, commit: Commit) -> list[Commit]: # GitHub EXAMPLE: # feat(changelog): add autofit_text_width filter to template 
environment (#1062) # # This change adds an equivalent style formatter that can apply a text alignment # to a maximum width and also maintain an indent over paragraphs of text # # * docs(changelog-templates): add definition & usage of autofit_text_width template filter # # * test(changelog-context): add test cases to check autofit_text_width filter use # # `git merge --squash` EXAMPLE: # Squashed commit of the following: # # commit 63ec09b9e844e616dcaa7bae35a0b66671b59fbb # Author: codejedi365 # Date: Sun Oct 13 12:05:23 2024 -0600 # # feat(release-config): some commit subject # # Return a list of artificial commits (each with a single commit message) return [ # create a artificial commit object (copy of original but with modified message) Commit( **{ **deep_copy_commit(commit), "message": commit_msg, } ) for commit_msg in self.unsquash_commit_message(force_str(commit.message)) ] or [commit] def unsquash_commit_message(self, message: str) -> list[str]: normalized_message = message.replace("\r", "").strip() # split by obvious separate commits (applies to manual git squash merges) obvious_squashed_commits = self.filters["git-header-commit"][0].split( normalized_message ) separate_commit_msgs: list[str] = reduce( lambda all_msgs, msgs: all_msgs + msgs, map(self._find_squashed_commits_in_str, obvious_squashed_commits), [], ) return list(filter(None, separate_commit_msgs)) def _find_squashed_commits_in_str(self, message: str) -> list[str]: separate_commit_msgs: list[str] = [] current_msg = "" for paragraph in filter(None, message.strip().split("\n\n")): # Apply filters to normalize the paragraph clean_paragraph = reduce(text_reducer, self.filters.values(), paragraph) # remove any filtered (and now empty) paragraphs (ie. the git headers) if not clean_paragraph.strip(): continue # Check if the paragraph is the start of a new conventional commit # Note: that we check that the subject has more than one word to differentiate from # a closing footer (e.g. 
"fix: #123", or "fix: ABC-123") if (match := self.commit_subject.search(clean_paragraph)) and len( match.group("subject").split(" ") ) > 1: # Since we found the start of the new commit, store any previous commit # message separately and start the new commit message if current_msg: separate_commit_msgs.append(current_msg) current_msg = clean_paragraph continue if not separate_commit_msgs and not current_msg: # if there are no separate commit messages and no current message # then this is the first commit message current_msg = dedent(clean_paragraph) continue # append the paragraph as part of the previous commit message if current_msg: current_msg += f"\n\n{dedent(clean_paragraph)}" # else: drop the paragraph continue return [*separate_commit_msgs, current_msg] python-semantic-release-10.4.1/src/semantic_release/commit_parser/conventional/parser_monorepo.py000066400000000000000000000513051506116242600335650ustar00rootroot00000000000000from __future__ import annotations import os from fnmatch import fnmatch from logging import getLogger from pathlib import Path, PurePath, PurePosixPath, PureWindowsPath from re import DOTALL, compile as regexp, error as RegexError # noqa: N812 from typing import TYPE_CHECKING from semantic_release.commit_parser._base import CommitParser from semantic_release.commit_parser.conventional.options import ( ConventionalCommitParserOptions, ) from semantic_release.commit_parser.conventional.options_monorepo import ( ConventionalCommitMonorepoParserOptions, ) from semantic_release.commit_parser.conventional.parser import ConventionalCommitParser from semantic_release.commit_parser.token import ( ParsedCommit, ParsedMessageResult, ParseError, ParseResult, ) from semantic_release.commit_parser.util import force_str from semantic_release.errors import InvalidParserOptions if TYPE_CHECKING: # pragma: no cover from git.objects.commit import Commit class ConventionalCommitMonorepoParser( CommitParser[ParseResult, 
ConventionalCommitMonorepoParserOptions] ): # TODO: Remove for v11 compatibility, get_default_options() will be called instead parser_options = ConventionalCommitMonorepoParserOptions def __init__( self, options: ConventionalCommitMonorepoParserOptions | None = None ) -> None: super().__init__(options) try: commit_scope_pattern = regexp( r"\(" + self.options.scope_prefix + r"(?P[^\n]+)?\)", ) except RegexError as err: raise InvalidParserOptions( str.join( "\n", [ f"Invalid options for {self.__class__.__name__}", "Unable to create regular expression from configured scope_prefix.", "Please check the configured scope_prefix and remove or escape any regular expression characters.", ], ) ) from err try: commit_type_pattern = regexp( r"(?P%s)" % str.join("|", self.options.allowed_tags) ) except RegexError as err: raise InvalidParserOptions( str.join( "\n", [ f"Invalid options for {self.__class__.__name__}", "Unable to create regular expression from configured commit-types.", "Please check the configured commit-types and remove or escape any regular expression characters.", ], ) ) from err # This regular expression includes scope prefix into the pattern and forces a scope to be present # PSR will match the full scope but we don't include it in the scope match, # which implicitly strips it from being included in the returned scope. 
self._strict_scope_pattern = regexp( str.join( "", [ r"^" + commit_type_pattern.pattern, commit_scope_pattern.pattern, r"(?P!)?:\s+", r"(?P[^\n]+)", r"(?:\n\n(?P.+))?", # commit body ], ), flags=DOTALL, ) self._optional_scope_pattern = regexp( str.join( "", [ r"^" + commit_type_pattern.pattern, r"(?:\((?P[^\n]+)\))?", r"(?P!)?:\s+", r"(?P[^\n]+)", r"(?:\n\n(?P.+))?", # commit body ], ), flags=DOTALL, ) file_select_filters, file_ignore_filters = self._process_path_filter_options( self.options.path_filters ) self._file_selection_filters: list[str] = file_select_filters self._file_ignore_filters: list[str] = file_ignore_filters self._logger = getLogger( str.join(".", [self.__module__, self.__class__.__name__]) ) self._base_parser = ConventionalCommitParser( options=ConventionalCommitParserOptions( **{ k: getattr(self.options, k) for k in ConventionalCommitParserOptions().__dataclass_fields__ } ) ) def get_default_options(self) -> ConventionalCommitMonorepoParserOptions: return ConventionalCommitMonorepoParserOptions() @staticmethod def _process_path_filter_options( # noqa: C901 path_filters: tuple[str, ...], ) -> tuple[list[str], list[str]]: file_ignore_filters: list[str] = [] file_selection_filters: list[str] = [] unique_selection_filters: set[str] = set() unique_ignore_filters: set[str] = set() for str_path in path_filters: str_filter = str_path[1:] if str_path.startswith("!") else str_path filter_list = ( file_ignore_filters if str_path.startswith("!") else file_selection_filters ) unique_cache = ( unique_ignore_filters if str_path.startswith("!") else unique_selection_filters ) # Since fnmatch is not too flexible, we will expand the path filters to include the name and any subdirectories # as this is how gitignore is interpreted. 
Possible scenarios: # | Input | Path Normalization | Filter List | # | ---------- | ------------------ | ------------------------- | # | / | / | /** | done # | /./ | / | /** | done # | /** | /** | /** | done # | /./** | /** | /** | done # | /* | /* | /* | done # | . | . | ./** | done # | ./ | . | ./** | done # | ././ | . | ./** | done # | ./** | ./** | ./** | done # | ./* | ./* | ./* | done # | .. | .. | ../** | done # | ../ | .. | ../** | done # | ../** | ../** | ../** | done # | ../* | ../* | ../* | done # | ../.. | ../.. | ../../** | done # | ../../ | ../../ | ../../** | done # | ../../docs | ../../docs | ../../docs, ../../docs/** | done # | src | src | src, src/** | done # | src/ | src | src/** | done # | src/* | src/* | src/* | done # | src/** | src/** | src/** | done # | /src | /src | /src, /src/** | done # | /src/ | /src | /src/** | done # | /src/** | /src/** | /src/** | done # | /src/* | /src/* | /src/* | done # | ../d/f.txt | ../d/f.txt | ../d/f.txt, ../d/f.txt/** | done # This expansion will occur regardless of the negation prefix os_path: PurePath | PurePosixPath | PureWindowsPath = PurePath(str_filter) if r"\\" in str_filter: # Windows paths were given so we convert them to posix paths os_path = PureWindowsPath(str_filter) os_path = ( PureWindowsPath( os_path.root, *os_path.parts[1:] ) # drop any drive letter if os_path.is_absolute() else os_path ) os_path = PurePosixPath(os_path.as_posix()) path_normalized = str(os_path) if path_normalized == str( Path(".").absolute().root ) or path_normalized == str(Path("/**")): path_normalized = "/**" elif path_normalized == str(Path("/*")): pass elif path_normalized == str(Path(".")) or path_normalized == str( Path("./**") ): path_normalized = "./**" elif path_normalized == str(Path("./*")): path_normalized = "./*" elif path_normalized == str(Path("..")) or path_normalized == str( Path("../**") ): path_normalized = "../**" elif path_normalized == str(Path("../*")): path_normalized = "../*" elif 
path_normalized.endswith(("..", "../**")): path_normalized = f"{path_normalized.rstrip('*')}/**" elif str_filter.endswith(os.sep): # If the path ends with a separator, it is a directory, so we add the directory and all subdirectories path_normalized = f"{path_normalized}/**" elif not path_normalized.endswith("*"): all_subdirs = f"{path_normalized}/**" if all_subdirs not in unique_cache: unique_cache.add(all_subdirs) filter_list.append(all_subdirs) # And fall through to add the path as is # END IF # Add the normalized path to the filter list if it is not already present if path_normalized not in unique_cache: unique_cache.add(path_normalized) filter_list.append(path_normalized) return file_selection_filters, file_ignore_filters def logged_parse_error(self, commit: Commit, error: str) -> ParseError: self._logger.debug(error) return ParseError(commit, error=error) def parse(self, commit: Commit) -> ParseResult | list[ParseResult]: if self.options.ignore_merge_commits and self._base_parser.is_merge_commit( commit ): return self._base_parser.log_parse_error( commit, "Ignoring merge commit: %s" % commit.hexsha[:8] ) separate_commits: list[Commit] = ( self._base_parser.unsquash_commit(commit) if self.options.parse_squash_commits else [commit] ) # Parse each commit individually if there were more than one parsed_commits: list[ParseResult] = list( map(self.parse_commit, separate_commits) ) def add_linked_merge_request( parsed_result: ParseResult, mr_number: str ) -> ParseResult: return ( parsed_result if not isinstance(parsed_result, ParsedCommit) else ParsedCommit( **{ **parsed_result._asdict(), "linked_merge_request": mr_number, } ) ) # TODO: improve this for other VCS systems other than GitHub & BitBucket # Github works as the first commit in a squash merge commit has the PR number # appended to the first line of the commit message lead_commit = next(iter(parsed_commits)) if isinstance(lead_commit, ParsedCommit) and lead_commit.linked_merge_request: # If the first commit 
has linked merge requests, assume all commits # are part of the same PR and add the linked merge requests to all # parsed commits parsed_commits = [ lead_commit, *map( lambda parsed_result, mr=lead_commit.linked_merge_request: ( # type: ignore[misc] add_linked_merge_request(parsed_result, mr) ), parsed_commits[1:], ), ] elif isinstance(lead_commit, ParseError) and ( mr_match := self._base_parser.mr_selector.search( force_str(lead_commit.message) ) ): # Handle BitBucket Squash Merge Commits (see #1085), which have non angular commit # format but include the PR number in the commit subject that we want to extract linked_merge_request = mr_match.group("mr_number") # apply the linked MR to all commits parsed_commits = [ add_linked_merge_request(parsed_result, linked_merge_request) for parsed_result in parsed_commits ] return parsed_commits def parse_message( self, message: str, strict_scope: bool = False ) -> ParsedMessageResult | None: if ( not (parsed_match := self._strict_scope_pattern.match(message)) and strict_scope ): return None if not parsed_match and not ( parsed_match := self._optional_scope_pattern.match(message) ): return None return self._base_parser.create_parsed_message_result(parsed_match) def parse_commit(self, commit: Commit) -> ParseResult: """Attempt to parse the commit message with a regular expression into a ParseResult.""" # Multiple scenarios to consider when parsing a commit message [Truth table]: # ======================================================================================================= # | || INPUTS || | # | # ||------------------------+----------------+--------------|| Result | # | || Example Commit Message | Relevant Files | Scope Prefix || | # |----||------------------------+----------------+--------------||-------------------------------------| # | 1 || type(prefix-cli): msg | yes | "prefix-" || ParsedCommit | # | 2 || type(prefix-cli): msg | yes | "" || ParsedCommit | # | 3 || type(prefix-cli): msg | no | "prefix-" || 
ParsedCommit | # | 4 || type(prefix-cli): msg | no | "" || ParseError[No files] | # | 5 || type(scope-cli): msg | yes | "prefix-" || ParsedCommit | # | 6 || type(scope-cli): msg | yes | "" || ParsedCommit | # | 7 || type(scope-cli): msg | no | "prefix-" || ParseError[No files & wrong scope] | # | 8 || type(scope-cli): msg | no | "" || ParseError[No files] | # | 9 || type(cli): msg | yes | "prefix-" || ParsedCommit | # | 10 || type(cli): msg | yes | "" || ParsedCommit | # | 11 || type(cli): msg | no | "prefix-" || ParseError[No files & wrong scope] | # | 12 || type(cli): msg | no | "" || ParseError[No files] | # | 13 || type: msg | yes | "prefix-" || ParsedCommit | # | 14 || type: msg | yes | "" || ParsedCommit | # | 15 || type: msg | no | "prefix-" || ParseError[No files & wrong scope] | # | 16 || type: msg | no | "" || ParseError[No files] | # | 17 || non-conventional msg | yes | "prefix-" || ParseError[Invalid Syntax] | # | 18 || non-conventional msg | yes | "" || ParseError[Invalid Syntax] | # | 19 || non-conventional msg | no | "prefix-" || ParseError[Invalid Syntax] | # | 20 || non-conventional msg | no | "" || ParseError[Invalid Syntax] | # ======================================================================================================= # Initial Logic Flow: # [1] When there are no relevant files and a scope prefix is defined, we enforce a strict scope # [2] When there are no relevant files and no scope prefix is defined, we parse scoped or unscoped commits # [3] When there are relevant files, we parse scoped or unscoped commits regardless of any defined prefix has_relevant_changed_files = self._has_relevant_changed_files(commit) strict_scope = bool( not has_relevant_changed_files and self.options.scope_prefix ) pmsg_result = self.parse_message( message=force_str(commit.message), strict_scope=strict_scope, ) if pmsg_result and (has_relevant_changed_files or strict_scope): self._logger.debug( "commit %s introduces a %s level_bump", commit.hexsha[:8], 
pmsg_result.bump, ) return ParsedCommit.from_parsed_message_result(commit, pmsg_result) if pmsg_result and not has_relevant_changed_files: return self.logged_parse_error( commit, f"Commit {commit.hexsha[:7]} has no changed files matching the path filter(s)", ) if strict_scope and self.parse_message(str(commit.message), strict_scope=False): return self.logged_parse_error( commit, str.join( " and ", [ f"Commit {commit.hexsha[:7]} has no changed files matching the path filter(s)", f"the scope does not match scope prefix '{self.options.scope_prefix}'", ], ), ) return self.logged_parse_error( commit, f"Format Mismatch! Unable to parse commit message: {commit.message!r}", ) def unsquash_commit_message(self, message: str) -> list[str]: return self._base_parser.unsquash_commit_message(message) def _has_relevant_changed_files(self, commit: Commit) -> bool: # Extract git root from commit git_root = ( Path(commit.repo.working_tree_dir or commit.repo.working_dir) .absolute() .resolve() ) cwd = Path.cwd().absolute().resolve() rel_cwd = cwd.relative_to(git_root) if git_root in cwd.parents else Path(".") sandboxed_selection_filters: list[str] = [ str(file_filter) for file_filter in ( ( git_root / select_filter.rstrip("/") if Path(select_filter).is_absolute() else git_root / rel_cwd / select_filter ) for select_filter in self._file_selection_filters ) if git_root in file_filter.parents ] sandboxed_ignore_filters: list[str] = [ str(file_filter) for file_filter in ( ( git_root / ignore_filter.rstrip("/") if Path(ignore_filter).is_absolute() else git_root / rel_cwd / ignore_filter ) for ignore_filter in self._file_ignore_filters ) if git_root in file_filter.parents ] # Check if the changed files of the commit that match the path filters for full_path in iter( str(git_root / rel_git_path) for rel_git_path in commit.stats.files ): # Check if the filepath matches any of the file selection filters if not any( fnmatch(full_path, select_filter) for select_filter in 
sandboxed_selection_filters ): continue # Pass filter matches, so now evaluate if it is supposed to be ignored if not any( fnmatch(full_path, ignore_filter) for ignore_filter in sandboxed_ignore_filters ): # No ignore filter matched, so it must be a relevant file return True return False python-semantic-release-10.4.1/src/semantic_release/commit_parser/emoji.py000066400000000000000000000425251506116242600267630ustar00rootroot00000000000000"""Commit parser which looks for emojis to determine the type of commit""" from __future__ import annotations import re from functools import reduce from itertools import zip_longest from re import compile as regexp from textwrap import dedent from typing import Tuple from git.objects.commit import Commit from pydantic.dataclasses import dataclass from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.token import ( ParsedCommit, ParsedMessageResult, ParseError, ParseResult, ) from semantic_release.commit_parser.util import ( deep_copy_commit, force_str, parse_paragraphs, ) from semantic_release.enums import LevelBump from semantic_release.errors import InvalidParserOptions from semantic_release.globals import logger from semantic_release.helpers import sort_numerically, text_reducer @dataclass class EmojiParserOptions(ParserOptions): """Options dataclass for EmojiCommitParser""" major_tags: Tuple[str, ...] = (":boom:",) """Commit-type prefixes that should result in a major release bump.""" minor_tags: Tuple[str, ...] = ( ":sparkles:", ":children_crossing:", ":lipstick:", ":iphone:", ":egg:", ":chart_with_upwards_trend:", ) """Commit-type prefixes that should result in a minor release bump.""" patch_tags: Tuple[str, ...] 
= ( ":ambulance:", ":lock:", ":bug:", ":zap:", ":goal_net:", ":alien:", ":wheelchair:", ":speech_balloon:", ":mag:", ":apple:", ":penguin:", ":checkered_flag:", ":robot:", ":green_apple:", ) """Commit-type prefixes that should result in a patch release bump.""" other_allowed_tags: Tuple[str, ...] = (":memo:", ":checkmark:") """Commit-type prefixes that are allowed but do not result in a version bump.""" allowed_tags: Tuple[str, ...] = ( *major_tags, *minor_tags, *patch_tags, *other_allowed_tags, ) """All commit-type prefixes that are allowed.""" default_bump_level: LevelBump = LevelBump.NO_RELEASE """The minimum bump level to apply to valid commit message.""" parse_linked_issues: bool = False """ Whether to parse linked issues from the commit message. Issue identification is not defined in the Gitmoji specification, so this parser will not attempt to parse issues by default. If enabled, the parser will use the same identification as GitHub, GitLab, and BitBucket use for linking issues, which is to look for a git commit message footer starting with "Closes:", "Fixes:", or "Resolves:" then a space, and then the issue identifier. The line prefix can be singular or plural and it is not case-sensitive but must have a colon and a whitespace separator. """ parse_squash_commits: bool = True """Toggle flag for whether or not to parse squash commits""" ignore_merge_commits: bool = True """Toggle flag for whether or not to ignore merge commits""" @property def tag_to_level(self) -> dict[str, LevelBump]: """A mapping of commit tags to the level bump they should result in.""" return self._tag_to_level def __post_init__(self) -> None: self._tag_to_level: dict[str, LevelBump] = { str(tag): level for tag, level in [ # we have to do a type ignore as zip_longest provides a type that is not specific enough # for our expected output. Due to the empty second array, we know the first is always longest # and that means no values in the first entry of the tuples will ever be a LevelBump. 
We # apply a str() to make mypy happy although it will never happen. *zip_longest(self.allowed_tags, (), fillvalue=self.default_bump_level), *zip_longest(self.patch_tags, (), fillvalue=LevelBump.PATCH), *zip_longest(self.minor_tags, (), fillvalue=LevelBump.MINOR), *zip_longest(self.major_tags, (), fillvalue=LevelBump.MAJOR), ] if "|" not in str(tag) } class EmojiCommitParser(CommitParser[ParseResult, EmojiParserOptions]): """ Parse a commit using an emoji in the subject line. When multiple emojis are encountered, the one with the highest bump level is used. If there are multiple emojis on the same level, the we use the one listed earliest in the configuration. If the message does not contain any known emojis, then the level to bump will be 0 and the type of change "Other". This parser never raises UnknownCommitMessageStyleError. Emojis are not removed from the description, and will appear alongside the commit subject in the changelog. """ # TODO: Deprecate in lieu of get_default_options() parser_options = EmojiParserOptions def __init__(self, options: EmojiParserOptions | None = None) -> None: super().__init__(options) # Reverse the list of tags to ensure that the highest level tags are matched first emojis_in_precedence_order = list(self.options.tag_to_level.keys())[::-1] try: highest_emoji_pattern = regexp( r"(?P%s)" % str.join("|", emojis_in_precedence_order) ) except re.error as err: raise InvalidParserOptions( str.join( "\n", [ f"Invalid options for {self.__class__.__name__}", "Unable to create regular expression from configured commit-types.", "Please check the configured commit-types and remove or escape any regular expression characters.", ], ) ) from err self.emoji_selector = regexp( str.join( "", [ f"^{highest_emoji_pattern.pattern}", r"(?:\((?P[^)]+)\))?:?", ], ) ) # GitHub & Gitea use (#123), GitLab uses (!123), and BitBucket uses (pull request #123) self.mr_selector = regexp( r"[\t ]+\((?:pull request )?(?P[#!]\d+)\)[\t ]*$" ) self.issue_selector = 
regexp( str.join( "", [ r"^(?:clos(?:e|es|ed|ing)|fix(?:es|ed|ing)?|resolv(?:e|es|ed|ing)|implement(?:s|ed|ing)?):", r"[\t ]+(?P.+)[\t ]*$", ], ), flags=re.MULTILINE | re.IGNORECASE, ) self.notice_selector = regexp(r"^NOTICE: (?P.+)$") self.filters = { "typo-extra-spaces": (regexp(r"(\S) +(\S)"), r"\1 \2"), "git-header-commit": ( regexp(r"^[\t ]*commit [0-9a-f]+$\n?", flags=re.MULTILINE), "", ), "git-header-author": ( regexp(r"^[\t ]*Author: .+$\n?", flags=re.MULTILINE), "", ), "git-header-date": ( regexp(r"^[\t ]*Date: .+$\n?", flags=re.MULTILINE), "", ), "git-squash-heading": ( regexp( r"^[\t ]*Squashed commit of the following:.*$\n?", flags=re.MULTILINE, ), "", ), "git-squash-commit-prefix": ( regexp( str.join( "", [ r"^(?:[\t ]*[*-][\t ]+|[\t ]+)?", # bullet points or indentation highest_emoji_pattern.pattern + r"(\W)", # prior to commit type ], ), flags=re.MULTILINE, ), # move commit type to the start of the line r"\1\2", ), } @staticmethod def get_default_options() -> EmojiParserOptions: return EmojiParserOptions() def commit_body_components_separator( self, accumulator: dict[str, list[str]], text: str ) -> dict[str, list[str]]: if (match := self.notice_selector.match(text)) and ( notice := match.group("notice") ): accumulator["notices"].append(notice) return accumulator if self.options.parse_linked_issues and ( match := self.issue_selector.search(text) ): predicate = regexp(r",? 
and | *[,;/& ] *").sub( ",", match.group("issue_predicate") or "" ) # Almost all issue trackers use a number to reference an issue so # we use a simple regexp to validate the existence of a number which helps filter out # any non-issue references that don't fit our expected format has_number = regexp(r"\d+") new_issue_refs: set[str] = set( filter( lambda issue_str, validator=has_number: validator.search(issue_str), # type: ignore[arg-type] predicate.split(","), ) ) if new_issue_refs: accumulator["linked_issues"] = sort_numerically( set(accumulator["linked_issues"]).union(new_issue_refs) ) return accumulator # Prevent appending duplicate descriptions if text not in accumulator["descriptions"]: accumulator["descriptions"].append(text) return accumulator def parse_message(self, message: str) -> ParsedMessageResult: msg_parts = message.split("\n", maxsplit=1) subject = msg_parts[0] msg_body = msg_parts[1] if len(msg_parts) > 1 else "" linked_merge_request = "" if mr_match := self.mr_selector.search(subject): linked_merge_request = mr_match.group("mr_number") subject = self.mr_selector.sub("", subject).strip() # Search for emoji of the highest importance in the subject match = self.emoji_selector.search(subject) primary_emoji = match.group("type") if match else "Other" parsed_scope = (match.group("scope") if match else None) or "" level_bump = self.options.tag_to_level.get( primary_emoji, self.options.default_bump_level ) # All emojis will remain part of the returned description body_components: dict[str, list[str]] = reduce( self.commit_body_components_separator, [ subject, *parse_paragraphs(msg_body), ], { "descriptions": [], "notices": [], "linked_issues": [], }, ) descriptions = tuple(body_components["descriptions"]) return ParsedMessageResult( bump=level_bump, type=primary_emoji, category=primary_emoji, scope=parsed_scope, descriptions=( descriptions[:1] if level_bump is LevelBump.MAJOR else descriptions ), breaking_descriptions=( descriptions[1:] if level_bump is 
LevelBump.MAJOR else () ), release_notices=tuple(body_components["notices"]), linked_issues=tuple(body_components["linked_issues"]), linked_merge_request=linked_merge_request, ) @staticmethod def is_merge_commit(commit: Commit) -> bool: return len(commit.parents) > 1 def parse_commit(self, commit: Commit) -> ParseResult: return ParsedCommit.from_parsed_message_result( commit, self.parse_message(force_str(commit.message)) ) def parse(self, commit: Commit) -> ParseResult | list[ParseResult]: """ Parse a commit message If the commit message is a squashed merge commit, it will be split into multiple commits, each of which will be parsed separately. Single commits will be returned as a list of a single ParseResult. """ if self.options.ignore_merge_commits and self.is_merge_commit(commit): err_msg = "Ignoring merge commit: %s" % commit.hexsha[:8] logger.debug(err_msg) return ParseError(commit, err_msg) separate_commits: list[Commit] = ( self.unsquash_commit(commit) if self.options.parse_squash_commits else [commit] ) # Parse each commit individually if there were more than one parsed_commits: list[ParseResult] = list( map(self.parse_commit, separate_commits) ) def add_linked_merge_request( parsed_result: ParseResult, mr_number: str ) -> ParseResult: return ( parsed_result if not isinstance(parsed_result, ParsedCommit) else ParsedCommit( **{ **parsed_result._asdict(), "linked_merge_request": mr_number, } ) ) # TODO: improve this for other VCS systems other than GitHub & BitBucket # Github works as the first commit in a squash merge commit has the PR number # appended to the first line of the commit message lead_commit = next(iter(parsed_commits)) if isinstance(lead_commit, ParsedCommit) and lead_commit.linked_merge_request: # If the first commit has linked merge requests, assume all commits # are part of the same PR and add the linked merge requests to all # parsed commits parsed_commits = [ lead_commit, *map( lambda parsed_result, mr=lead_commit.linked_merge_request: ( # 
type: ignore[misc] add_linked_merge_request(parsed_result, mr) ), parsed_commits[1:], ), ] return parsed_commits def unsquash_commit(self, commit: Commit) -> list[Commit]: # GitHub EXAMPLE: # ✨(changelog): add autofit_text_width filter to template environment (#1062) # # This change adds an equivalent style formatter that can apply a text alignment # to a maximum width and also maintain an indent over paragraphs of text # # * 🌠Support Japanese language # # * ✅(changelog-context): add test cases to check autofit_text_width filter use # # `git merge --squash` EXAMPLE: # Squashed commit of the following: # # commit 63ec09b9e844e616dcaa7bae35a0b66671b59fbb # Author: codejedi365 # Date: Sun Oct 13 12:05:23 2024 -0000 # # âš¡ï¸ (homepage): Lazyload home screen images # # # Return a list of artificial commits (each with a single commit message) return [ # create a artificial commit object (copy of original but with modified message) Commit( **{ **deep_copy_commit(commit), "message": commit_msg, } ) for commit_msg in self.unsquash_commit_message(force_str(commit.message)) ] or [commit] def unsquash_commit_message(self, message: str) -> list[str]: normalized_message = message.replace("\r", "").strip() # split by obvious separate commits (applies to manual git squash merges) obvious_squashed_commits = self.filters["git-header-commit"][0].split( normalized_message ) separate_commit_msgs: list[str] = reduce( lambda all_msgs, msgs: all_msgs + msgs, map(self._find_squashed_commits_in_str, obvious_squashed_commits), [], ) return list(filter(None, separate_commit_msgs)) def _find_squashed_commits_in_str(self, message: str) -> list[str]: separate_commit_msgs: list[str] = [] current_msg = "" for paragraph in filter(None, message.strip().split("\n\n")): # Apply filters to normalize the paragraph clean_paragraph = reduce(text_reducer, self.filters.values(), paragraph) # remove any filtered (and now empty) paragraphs (ie. 
the git headers) if not clean_paragraph.strip(): continue # Check if the paragraph is the start of a new emoji commit if not self.emoji_selector.search(clean_paragraph): if not separate_commit_msgs and not current_msg: # if there are no separate commit messages and no current message # then this is the first commit message current_msg = dedent(clean_paragraph) continue # append the paragraph as part of the previous commit message if current_msg: current_msg += f"\n\n{dedent(clean_paragraph)}" # else: drop the paragraph continue # Since we found the start of the new commit, store any previous commit # message separately and start the new commit message if current_msg: separate_commit_msgs.append(current_msg) current_msg = clean_paragraph return [*separate_commit_msgs, current_msg] python-semantic-release-10.4.1/src/semantic_release/commit_parser/scipy.py000066400000000000000000000460071506116242600270060ustar00rootroot00000000000000""" Parses commit messages using `scipy tags `_ of the form:: (): The elements , and are optional. If no tag is present, the commit will be added to the changelog section "None" and no version increment will be performed. While is supported here it isn't actually part of the scipy style. If it is missing, parentheses around it are too. The commit should then be of the form:: : To communicate a breaking change add "BREAKING CHANGE" into the body at the beginning of a paragraph. Fill this paragraph with information how to migrate from the broken behavior to the new behavior. It will be added to the "Breaking" section of the changelog. Supported Tags:: ( API, DEP, ENH, REV, BUG, MAINT, BENCH, BLD, ) DEV, DOC, STY, TST, REL, FEAT, TEST Supported Changelog Sections:: breaking, feature, fix, Other, None .. 
_`scipy-style`: https://docs.scipy.org/doc/scipy/reference/dev/contributor/development_workflow.html#writing-the-commit-message """ from __future__ import annotations import re from functools import reduce from itertools import zip_longest from re import compile as regexp from textwrap import dedent from typing import TYPE_CHECKING, Tuple from git.objects.commit import Commit from pydantic.dataclasses import dataclass from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.token import ( ParsedCommit, ParsedMessageResult, ParseError, ParseResult, ) from semantic_release.commit_parser.util import ( deep_copy_commit, force_str, parse_paragraphs, ) from semantic_release.enums import LevelBump from semantic_release.errors import InvalidParserOptions from semantic_release.globals import logger from semantic_release.helpers import sort_numerically, text_reducer if TYPE_CHECKING: # pragma: no cover from git.objects.commit import Commit def _logged_parse_error(commit: Commit, error: str) -> ParseError: logger.debug(error) return ParseError(commit, error=error) tag_to_section = { "API": "breaking", "BENCH": "none", "BLD": "fix", "BUG": "fix", "DEP": "breaking", "DEV": "none", "DOC": "documentation", "ENH": "feature", "MAINT": "fix", "REV": "other", "STY": "none", "TST": "none", "REL": "none", # strictly speaking not part of the standard "FEAT": "feature", "TEST": "none", } @dataclass class ScipyParserOptions(ParserOptions): """ Options dataclass for ScipyCommitParser Scipy-style commit messages follow the same format as Angular-style commit just with different tag names. """ major_tags: Tuple[str, ...] = ("API", "DEP") """Commit-type prefixes that should result in a major release bump.""" minor_tags: Tuple[str, ...] = ("ENH", "FEAT") """Commit-type prefixes that should result in a minor release bump.""" patch_tags: Tuple[str, ...] 
= ("BLD", "BUG", "MAINT") """Commit-type prefixes that should result in a patch release bump.""" other_allowed_tags: Tuple[str, ...] = ( # "REV", # Revert commits are NOT Currently Supported "DEV", "BENCH", "DOC", "STY", "TST", "REL", "TEST", ) """Commit-type prefixes that are allowed but do not result in a version bump.""" allowed_tags: Tuple[str, ...] = ( *major_tags, *minor_tags, *patch_tags, *other_allowed_tags, ) """ All commit-type prefixes that are allowed. These are used to identify a valid commit message. If a commit message does not start with one of these prefixes, it will not be considered a valid commit message. """ # TODO: breaking v11, make consistent with AngularParserOptions default_level_bump: LevelBump = LevelBump.NO_RELEASE """The minimum bump level to apply to valid commit message.""" parse_squash_commits: bool = True """Toggle flag for whether or not to parse squash commits""" ignore_merge_commits: bool = True """Toggle flag for whether or not to ignore merge commits""" @property def tag_to_level(self) -> dict[str, LevelBump]: """A mapping of commit tags to the level bump they should result in.""" return self._tag_to_level def __post_init__(self) -> None: # TODO: breaking v11, remove as the name is now consistent self.default_bump_level = self.default_level_bump self._tag_to_level: dict[str, LevelBump] = { str(tag): level for tag, level in [ # we have to do a type ignore as zip_longest provides a type that is not specific enough # for our expected output. Due to the empty second array, we know the first is always longest # and that means no values in the first entry of the tuples will ever be a LevelBump. We # apply a str() to make mypy happy although it will never happen. 
*zip_longest(self.allowed_tags, (), fillvalue=self.default_bump_level), *zip_longest(self.patch_tags, (), fillvalue=LevelBump.PATCH), *zip_longest(self.minor_tags, (), fillvalue=LevelBump.MINOR), *zip_longest(self.major_tags, (), fillvalue=LevelBump.MAJOR), ] if "|" not in str(tag) } class ScipyCommitParser(CommitParser[ParseResult, ScipyParserOptions]): """Parser for scipy-style commit messages""" # TODO: Deprecate in lieu of get_default_options() parser_options = ScipyParserOptions def __init__(self, options: ScipyParserOptions | None = None) -> None: super().__init__(options) try: commit_type_pattern = regexp( r"(?P%s)" % str.join("|", self.options.allowed_tags) ) except re.error as err: raise InvalidParserOptions( str.join( "\n", [ f"Invalid options for {self.__class__.__name__}", "Unable to create regular expression from configured commit-types.", "Please check the configured commit-types and remove or escape any regular expression characters.", ], ) ) from err self.commit_prefix = regexp( str.join( "", [ f"^{commit_type_pattern.pattern}", r"(?::[\t ]*(?P[^:\n]+))?", r":[\t ]+", ], ) ) self.commit_msg_pattern = regexp( str.join( "", [ self.commit_prefix.pattern, r"(?P[^\n]+)", r"(?:\n\n(?P.+))?", # commit body ], ), flags=re.DOTALL, ) # GitHub & Gitea use (#123), GitLab uses (!123), and BitBucket uses (pull request #123) self.mr_selector = regexp( r"[\t ]+\((?:pull request )?(?P[#!]\d+)\)[\t ]*$" ) self.issue_selector = regexp( str.join( "", [ r"^(?:clos(?:e|es|ed|ing)|fix(?:es|ed|ing)?|resolv(?:e|es|ed|ing)|implement(?:s|ed|ing)?):", r"[\t ]+(?P.+)[\t ]*$", ], ), flags=re.MULTILINE | re.IGNORECASE, ) self.notice_selector = regexp(r"^NOTICE: (?P.+)$") self.filters = { "typo-extra-spaces": (regexp(r"(\S) +(\S)"), r"\1 \2"), "git-header-commit": ( regexp(r"^[\t ]*commit [0-9a-f]+$\n?", flags=re.MULTILINE), "", ), "git-header-author": ( regexp(r"^[\t ]*Author: .+$\n?", flags=re.MULTILINE), "", ), "git-header-date": ( regexp(r"^[\t ]*Date: .+$\n?", 
flags=re.MULTILINE), "", ), "git-squash-heading": ( regexp( r"^[\t ]*Squashed commit of the following:.*$\n?", flags=re.MULTILINE, ), "", ), "git-squash-commit-prefix": ( regexp( str.join( "", [ r"^(?:[\t ]*[*-][\t ]+|[\t ]+)?", # bullet points or indentation commit_type_pattern.pattern + r"\b", # prior to commit type ], ), flags=re.MULTILINE, ), # move commit type to the start of the line r"\1", ), } @staticmethod def get_default_options() -> ScipyParserOptions: return ScipyParserOptions() def commit_body_components_separator( self, accumulator: dict[str, list[str]], text: str ) -> dict[str, list[str]]: if (match := self.notice_selector.match(text)) and ( notice := match.group("notice") ): accumulator["notices"].append(notice) return accumulator if match := self.issue_selector.search(text): # if match := self.issue_selector.search(text): predicate = regexp(r",? and | *[,;/& ] *").sub( ",", match.group("issue_predicate") or "" ) # Almost all issue trackers use a number to reference an issue so # we use a simple regexp to validate the existence of a number which helps filter out # any non-issue references that don't fit our expected format has_number = regexp(r"\d+") new_issue_refs: set[str] = set( filter( lambda issue_str, validator=has_number: validator.search(issue_str), # type: ignore[arg-type] predicate.split(","), ) ) if new_issue_refs: accumulator["linked_issues"] = sort_numerically( set(accumulator["linked_issues"]).union(new_issue_refs) ) return accumulator # Prevent appending duplicate descriptions if text not in accumulator["descriptions"]: accumulator["descriptions"].append(text) return accumulator def parse_message(self, message: str) -> ParsedMessageResult | None: if not (parsed := self.commit_msg_pattern.match(message)): return None parsed_scope = parsed.group("scope") or "" parsed_subject = parsed.group("subject") parsed_text = parsed.group("text") parsed_type = parsed.group("type") linked_merge_request = "" if mr_match := 
self.mr_selector.search(parsed_subject): linked_merge_request = mr_match.group("mr_number") parsed_subject = self.mr_selector.sub("", parsed_subject).strip() body_components: dict[str, list[str]] = reduce( self.commit_body_components_separator, [ # Insert the subject before the other paragraphs parsed_subject, *parse_paragraphs(parsed_text or ""), ], { "descriptions": [], "notices": [], "linked_issues": [], }, ) level_bump = self.options.tag_to_level.get( parsed_type, self.options.default_bump_level ) return ParsedMessageResult( bump=level_bump, type=parsed_type, category=tag_to_section.get(parsed_type, "None"), scope=parsed_scope, descriptions=tuple( body_components["descriptions"] if level_bump != LevelBump.MAJOR else [parsed_subject] ), breaking_descriptions=tuple( body_components["descriptions"][1:] if level_bump == LevelBump.MAJOR else [] ), release_notices=tuple(body_components["notices"]), linked_issues=tuple(body_components["linked_issues"]), linked_merge_request=linked_merge_request, ) @staticmethod def is_merge_commit(commit: Commit) -> bool: return len(commit.parents) > 1 def parse_commit(self, commit: Commit) -> ParseResult: if not (parsed_msg_result := self.parse_message(force_str(commit.message))): return _logged_parse_error( commit, f"Unable to parse commit message: {commit.message!r}", ) return ParsedCommit.from_parsed_message_result(commit, parsed_msg_result) def parse(self, commit: Commit) -> ParseResult | list[ParseResult]: """ Parse a commit message If the commit message is a squashed merge commit, it will be split into multiple commits, each of which will be parsed separately. Single commits will be returned as a list of a single ParseResult. 
""" if self.options.ignore_merge_commits and self.is_merge_commit(commit): return _logged_parse_error( commit, "Ignoring merge commit: %s" % commit.hexsha[:8] ) separate_commits: list[Commit] = ( self.unsquash_commit(commit) if self.options.parse_squash_commits else [commit] ) # Parse each commit individually if there were more than one parsed_commits: list[ParseResult] = list( map(self.parse_commit, separate_commits) ) def add_linked_merge_request( parsed_result: ParseResult, mr_number: str ) -> ParseResult: return ( parsed_result if not isinstance(parsed_result, ParsedCommit) else ParsedCommit( **{ **parsed_result._asdict(), "linked_merge_request": mr_number, } ) ) # TODO: improve this for other VCS systems other than GitHub & BitBucket # Github works as the first commit in a squash merge commit has the PR number # appended to the first line of the commit message lead_commit = next(iter(parsed_commits)) if isinstance(lead_commit, ParsedCommit) and lead_commit.linked_merge_request: # If the first commit has linked merge requests, assume all commits # are part of the same PR and add the linked merge requests to all # parsed commits parsed_commits = [ lead_commit, *map( lambda parsed_result, mr=lead_commit.linked_merge_request: ( # type: ignore[misc] add_linked_merge_request(parsed_result, mr) ), parsed_commits[1:], ), ] elif isinstance(lead_commit, ParseError) and ( mr_match := self.mr_selector.search(force_str(lead_commit.message)) ): # Handle BitBucket Squash Merge Commits (see #1085), which have non angular commit # format but include the PR number in the commit subject that we want to extract linked_merge_request = mr_match.group("mr_number") # apply the linked MR to all commits parsed_commits = [ add_linked_merge_request(parsed_result, linked_merge_request) for parsed_result in parsed_commits ] return parsed_commits def unsquash_commit(self, commit: Commit) -> list[Commit]: # GitHub EXAMPLE: # feat(changelog): add autofit_text_width filter to template 
environment (#1062) # # This change adds an equivalent style formatter that can apply a text alignment # to a maximum width and also maintain an indent over paragraphs of text # # * docs(changelog-templates): add definition & usage of autofit_text_width template filter # # * test(changelog-context): add test cases to check autofit_text_width filter use # # `git merge --squash` EXAMPLE: # Squashed commit of the following: # # commit 63ec09b9e844e616dcaa7bae35a0b66671b59fbb # Author: codejedi365 # Date: Sun Oct 13 12:05:23 2024 -0600 # # feat(release-config): some commit subject # # Return a list of artificial commits (each with a single commit message) return [ # create a artificial commit object (copy of original but with modified message) Commit( **{ **deep_copy_commit(commit), "message": commit_msg, } ) for commit_msg in self.unsquash_commit_message(force_str(commit.message)) ] or [commit] def unsquash_commit_message(self, message: str) -> list[str]: normalized_message = message.replace("\r", "").strip() # split by obvious separate commits (applies to manual git squash merges) obvious_squashed_commits = self.filters["git-header-commit"][0].split( normalized_message ) separate_commit_msgs: list[str] = reduce( lambda all_msgs, msgs: all_msgs + msgs, map(self._find_squashed_commits_in_str, obvious_squashed_commits), [], ) return list(filter(None, separate_commit_msgs)) def _find_squashed_commits_in_str(self, message: str) -> list[str]: separate_commit_msgs: list[str] = [] current_msg = "" for paragraph in filter(None, message.strip().split("\n\n")): # Apply filters to normalize the paragraph clean_paragraph = reduce(text_reducer, self.filters.values(), paragraph) # remove any filtered (and now empty) paragraphs (ie. 
the git headers) if not clean_paragraph.strip(): continue # Check if the paragraph is the start of a new angular commit if not self.commit_prefix.search(clean_paragraph): if not separate_commit_msgs and not current_msg: # if there are no separate commit messages and no current message # then this is the first commit message current_msg = dedent(clean_paragraph) continue # append the paragraph as part of the previous commit message if current_msg: current_msg += f"\n\n{dedent(clean_paragraph)}" # else: drop the paragraph continue # Since we found the start of the new commit, store any previous commit # message separately and start the new commit message if current_msg: separate_commit_msgs.append(current_msg) current_msg = clean_paragraph return [*separate_commit_msgs, current_msg] python-semantic-release-10.4.1/src/semantic_release/commit_parser/tag.py000066400000000000000000000067211506116242600264310ustar00rootroot00000000000000"""Legacy commit parser from Python Semantic Release 1.0""" from __future__ import annotations import re from git.objects.commit import Commit from pydantic.dataclasses import dataclass from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.token import ParsedCommit, ParseError, ParseResult from semantic_release.commit_parser.util import breaking_re, parse_paragraphs from semantic_release.enums import LevelBump from semantic_release.globals import logger re_parser = re.compile(r"(?P[^\n]+)" + r"(:?\n\n(?P.+))?", re.DOTALL) @dataclass class TagParserOptions(ParserOptions): minor_tag: str = ":sparkles:" patch_tag: str = ":nut_and_bolt:" def _logged_parse_error(commit: Commit, error: str) -> ParseError: logger.debug(error) return ParseError(commit, error=error) class TagCommitParser(CommitParser[ParseResult, TagParserOptions]): """ Parse a commit message according to the 1.0 version of python-semantic-release. 
It expects a tag of some sort in the commit message and will use the rest of the first line as changelog content. """ # TODO: Deprecate in lieu of get_default_options() parser_options = TagParserOptions @staticmethod def get_default_options() -> TagParserOptions: return TagParserOptions() def parse(self, commit: Commit) -> ParseResult | list[ParseResult]: message = str(commit.message) # Attempt to parse the commit message with a regular expression parsed = re_parser.match(message) if not parsed: return _logged_parse_error( commit, error=f"Unable to parse the given commit message: {message!r}" ) subject = parsed.group("subject") # Check tags for minor or patch if self.options.minor_tag in message: level = "feature" level_bump = LevelBump.MINOR if subject: subject = subject.replace(self.options.minor_tag, "") elif self.options.patch_tag in message: level = "fix" level_bump = LevelBump.PATCH if subject: subject = subject.replace(self.options.patch_tag, "") else: # We did not find any tags in the commit message return _logged_parse_error( commit, error=f"Unable to parse the given commit message: {message!r}" ) if parsed.group("text"): descriptions = parse_paragraphs(parsed.group("text")) else: descriptions = [] descriptions.insert(0, subject.strip()) # Look for descriptions of breaking changes breaking_descriptions = [ match.group(1) for match in (breaking_re.match(p) for p in descriptions[1:]) if match ] if breaking_descriptions: level = "breaking" level_bump = LevelBump.MAJOR logger.debug( "commit %s upgraded to a %s level_bump due to included breaking descriptions", commit.hexsha[:8], level_bump, ) logger.debug( "commit %s introduces a %s level_bump", commit.hexsha[:8], level_bump ) return ParsedCommit( bump=level_bump, type=level, scope="", descriptions=descriptions, breaking_descriptions=breaking_descriptions, commit=commit, ) 
python-semantic-release-10.4.1/src/semantic_release/commit_parser/token.py000066400000000000000000000173401506116242600267750ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING, NamedTuple, NoReturn, TypeVar, Union from semantic_release.commit_parser.util import force_str from semantic_release.errors import CommitParseError if TYPE_CHECKING: # pragma: no cover from git.objects.commit import Commit from semantic_release.enums import LevelBump class ParsedMessageResult(NamedTuple): """ A read-only named tuple object representing the result from parsing a commit message. Essentially this is a data structure which holds the parsed information from a commit message without the actual commit object itself. Very helpful for unit testing. Most of the fields will replicate the fields of a :py:class:`ParsedCommit ` """ bump: LevelBump type: str category: str scope: str descriptions: tuple[str, ...] breaking_descriptions: tuple[str, ...] = () release_notices: tuple[str, ...] = () linked_issues: tuple[str, ...] = () linked_merge_request: str = "" include_in_changelog: bool = True class ParsedCommit(NamedTuple): """A read-only named tuple object representing the result of parsing a commit message.""" bump: LevelBump """A LevelBump enum value indicating what type of change this commit introduces.""" type: str """ The type of the commit as a string, per the commit message style. This is up to the parser to implement; for example, the EmojiCommitParser parser fills this field with the emoji representing the most significant change for the commit. """ scope: str """ The scope, as a string, parsed from the commit. Generally an optional field based on the commit message style which means it very likely can be an empty string. Commit styles which do not have a meaningful concept of "scope" usually fill this field with an empty string. """ descriptions: list[str] """ A list of paragraphs from the commit message. 
Paragraphs are generally delimited by a double-newline since git commit messages are sometimes manually wordwrapped with a single newline, but this is up to the parser to implement. """ breaking_descriptions: list[str] """ A list of paragraphs which are deemed to identify and describe breaking changes by the parser. An example would be a paragraph which begins with the text ``BREAKING CHANGE:`` in the commit message but the parser gennerally strips the prefix and includes the rest of the paragraph in this list. """ commit: Commit """The original commit object (a class defined by GitPython) that was parsed""" release_notices: tuple[str, ...] = () """ A tuple of release notices, which are additional information about the changes that affect the user. An example would be a paragraph which begins with the text ``NOTICE:`` in the commit message but the parser generally strips the prefix and includes the rest of the paragraph in this list. """ linked_issues: tuple[str, ...] = () """ A tuple of issue numbers as strings, if the commit is contains issue references. If there are no issue references, this should be an empty tuple. Although, we generally refer to them as "issue numbers", it generally should be a string to adhere to the prefixes used by the VCS (ex. ``#`` for GitHub, GitLab, etc.) or issue tracker (ex. JIRA uses ``AAA-###``). """ linked_merge_request: str = "" """ A pull request or merge request definition, if the commit is labeled with a pull/merge request number. This is a string value which includes any special character prefix used by the VCS (e.g. ``#`` for GitHub, ``!`` for GitLab). """ include_in_changelog: bool = True """ A boolean value indicating whether this commit should be included in the changelog. This enables parsers to flag commits which are not user-facing or are otherwise not relevant to the changelog to be filtered out by PSR's internal algorithms. """ @property def message(self) -> str: """ A string representation of the commit message. 
This is a pass through property for convience to access the ``message`` attribute of the ``commit`` object. If the message is of type ``bytes`` then it will be decoded to a ``UTF-8`` string. """ return force_str(self.commit.message).replace("\r", "") @property def hexsha(self) -> str: """ A hex representation of the hash value of the commit. This is a pass through property for convience to access the ``hexsha`` attribute of the ``commit``. """ return self.commit.hexsha @property def short_hash(self) -> str: """A short representation of the hash value (in hex) of the commit.""" return self.hexsha[:7] @property def linked_pull_request(self) -> str: """An alias to the linked_merge_request attribute.""" return self.linked_merge_request def is_merge_commit(self) -> bool: return bool(len(self.commit.parents) > 1) @staticmethod def from_parsed_message_result( commit: Commit, parsed_message_result: ParsedMessageResult ) -> ParsedCommit: """A convience method to create a ParsedCommit object from a ParsedMessageResult object and a Commit object.""" return ParsedCommit( bump=parsed_message_result.bump, # TODO: breaking v11, swap back to type rather than category type=parsed_message_result.category, scope=parsed_message_result.scope, descriptions=list(parsed_message_result.descriptions), breaking_descriptions=list(parsed_message_result.breaking_descriptions), commit=commit, release_notices=parsed_message_result.release_notices, linked_issues=parsed_message_result.linked_issues, linked_merge_request=parsed_message_result.linked_merge_request, include_in_changelog=parsed_message_result.include_in_changelog, ) class ParseError(NamedTuple): """A read-only named tuple object representing an error that occurred while parsing a commit message.""" commit: Commit """The original commit object (a class defined by GitPython) that was parsed""" error: str """A string with a description for why the commit parsing failed.""" @property def message(self) -> str: """ A string representation of 
the commit message. This is a pass through property for convience to access the ``message`` attribute of the ``commit`` object. If the message is of type ``bytes`` then it will be decoded to a ``UTF-8`` string. """ return force_str(self.commit.message).replace("\r", "") @property def hexsha(self) -> str: """ A hex representation of the hash value of the commit. This is a pass through property for convience to access the ``hexsha`` attribute of the ``commit``. """ return self.commit.hexsha @property def short_hash(self) -> str: """A short representation of the hash value (in hex) of the commit.""" return self.hexsha[:7] def is_merge_commit(self) -> bool: return bool(len(self.commit.parents) > 1) def raise_error(self) -> NoReturn: """A convience method to raise a CommitParseError with the error message.""" raise CommitParseError(self.error) _T = TypeVar("_T", bound=ParsedCommit) _E = TypeVar("_E", bound=ParseError) # For extensions, this type can be used to build an alias # for example CustomParseResult = ParseResultType[CustomParsedCommit, ParseError] ParseResultType = Union[_T, _E] ParseResult = ParseResultType[ParsedCommit, ParseError] python-semantic-release-10.4.1/src/semantic_release/commit_parser/util.py000066400000000000000000000075671506116242600266440ustar00rootroot00000000000000from __future__ import annotations from contextlib import suppress from copy import deepcopy from functools import reduce from re import MULTILINE, compile as regexp from typing import TYPE_CHECKING # TODO: remove in v11 from semantic_release.helpers import ( sort_numerically, # noqa: F401 # TODO: maintained for compatibility ) if TYPE_CHECKING: # pragma: no cover from re import Pattern from typing import Any, TypedDict from git import Commit class RegexReplaceDef(TypedDict): pattern: Pattern repl: str breaking_re = regexp(r"BREAKING[ -]CHANGE:\s?(.*)") un_word_wrap: RegexReplaceDef = { # Match a line ending where the next line is not indented, or a bullet "pattern": regexp(r"((? 
list[str]: r""" This will take a text block and return a list containing each paragraph with single line breaks collapsed into spaces. To handle Windows line endings, carriage returns '\r' are removed before separating into paragraphs. It will attempt to detect Git footers and they will not be condensed. :param text: The text string to be divided. :return: A list of condensed paragraphs, as strings. """ adjusted_text = reduce( lambda txt, adj: adj["pattern"].sub(adj["repl"], txt), [trim_line_endings, un_word_wrap_hyphen], text, ) # Repeat replacements until no more changes are made prev_iteration = "" while prev_iteration != adjusted_text: prev_iteration = adjusted_text adjusted_text = spread_out_git_footers["pattern"].sub( spread_out_git_footers["repl"], adjusted_text ) return list( filter( None, [ un_word_wrap["pattern"].sub(un_word_wrap["repl"], paragraph).strip() for paragraph in adjusted_text.strip().split("\n\n") ], ) ) def force_str(msg: str | bytes | bytearray | memoryview) -> str: # This shouldn't be a thing but typing is being weird around what # git.commit.message returns and the memoryview type won't go away message = msg.tobytes() if isinstance(msg, memoryview) else msg return ( message.decode("utf-8") if isinstance(message, (bytes, bytearray)) else str(message) ) def deep_copy_commit(commit: Commit) -> dict[str, Any]: keys = [ "repo", "binsha", "author", "authored_date", "committer", "committed_date", "message", "tree", "parents", "encoding", "gpgsig", "author_tz_offset", "committer_tz_offset", ] kwargs = {} for key in keys: with suppress(ValueError): if hasattr(commit, key) and (value := getattr(commit, key)) is not None: if key in ["parents", "repo", "tree"]: # These tend to have circular references so don't deepcopy them kwargs[key] = value continue kwargs[key] = deepcopy(value) return kwargs python-semantic-release-10.4.1/src/semantic_release/const.py000066400000000000000000000015351506116242600241360ustar00rootroot00000000000000from __future__ 
import annotations import os import re PYPI_WEB_DOMAIN = "pypi.org" # https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string SEMVER_REGEX = re.compile( r""" (?P0|[1-9]\d*) \. (?P0|[1-9]\d*) \. (?P0|[1-9]\d*) (?:-(?P(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))? (?:\+(?P[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))? """, flags=re.VERBOSE, ) COMMIT_MESSAGE = "{version}\n\nAutomatically generated by python-semantic-release" DEFAULT_COMMIT_AUTHOR = "semantic-release " DEFAULT_VERSION = "0.0.0" DEFAULT_SHELL: str | None = None if os.name == "posix": DEFAULT_SHELL = os.getenv("SHELL", "/bin/sh") elif os.name == "nt": DEFAULT_SHELL = os.getenv("COMSPEC") python-semantic-release-10.4.1/src/semantic_release/data/000077500000000000000000000000001506116242600233435ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/data/templates/000077500000000000000000000000001506116242600253415ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/data/templates/conventional/000077500000000000000000000000001506116242600300405ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/data/templates/conventional/md/000077500000000000000000000000001506116242600304405ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/data/templates/conventional/md/.components/000077500000000000000000000000001506116242600327035ustar00rootroot00000000000000changelog_header.md.j2000066400000000000000000000002461506116242600367210ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/data/templates/conventional/md/.components# CHANGELOG {% if ctx.changelog_mode == "update" %}{# # IMPORTANT: add insertion flag for next version update #}{{ insertion_flag ~ "\n" }}{% endif %} 
changelog_init.md.j2000066400000000000000000000015361506116242600364370ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/data/templates/conventional/md/.components{# This changelog template initializes a full changelog for the project, it follows the following logic: 1. Header 2. Any Unreleased Details (uncommon) 3. all previous releases except the very first release 4. the first release #}{# # Header #}{% include "changelog_header.md.j2" -%}{# # Any Unreleased Details (uncommon) #}{% include "unreleased_changes.md.j2" -%}{# # Since this is initialization, we are generating all the previous # release notes per version. The very first release notes is specialized #}{% if releases | length > 0 %}{% for release in releases %}{{ "\n" }}{% if loop.last and ctx.mask_initial_release %}{%- include "first_release.md.j2" -%}{% else %}{%- include "versioned_changes.md.j2" -%}{% endif %}{{ "\n" }}{% endfor %}{% endif %} changelog_update.md.j2000066400000000000000000000050561506116242600367570ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/data/templates/conventional/md/.components{# This Update changelog template uses the following logic: 1. Read previous changelog file (ex. project_root/CHANGELOG.md) 2. Split on insertion flag (ex. ) 3. Print top half of previous changelog 3. New Changes (unreleased commits & newly released) 4. 
Print bottom half of previous changelog Note: if a previous file was not found, it does not write anything at the bottom but render does NOT fail #}{% set prev_changelog_contents = prev_changelog_file | read_file | safe %}{% set changelog_parts = prev_changelog_contents.split(insertion_flag, maxsplit=1) %}{# #}{% if changelog_parts | length < 2 %}{# # insertion flag was not found, check if the file was empty or did not exist #}{% if prev_changelog_contents | length > 0 %}{# # File has content but no insertion flag, therefore, file will not be updated #}{{ changelog_parts[0] }}{% else %}{# # File was empty or did not exist, therefore, it will be created from scratch #}{% include "changelog_init.md.j2" %}{% endif %}{% else %}{# # Previous Changelog Header # - Depending if there is header content, then it will separate the insertion flag # with a newline from header content, otherwise it will just print the insertion flag #}{% set prev_changelog_top = changelog_parts[0] | trim %}{% if prev_changelog_top | length > 0 %}{{ "%s\n\n%s\n" | format(prev_changelog_top, insertion_flag | trim) }}{% else %}{{ "%s\n" | format(insertion_flag | trim) }}{% endif %}{# # Any Unreleased Details (uncommon) #}{% include "unreleased_changes.md.j2" -%}{# #}{% if releases | length > 0 %}{# # Latest Release Details #}{% set release = releases[0] %}{# #}{% if releases | length == 1 and ctx.mask_initial_release %}{# # First Release detected #}{{ "\n" }}{%- include "first_release.md.j2" -%}{{ "\n" }}{# #}{% elif "# " ~ release.version.as_semver_tag() ~ " " not in changelog_parts[1] %}{# # The release version is not already in the changelog so we add it #}{{ "\n" }}{%- include "versioned_changes.md.j2" -%}{{ "\n" }}{# #}{% endif %}{% endif %}{# # Previous Changelog Footer # - skips printing footer if empty, which happens when the insertion_flag # was at the end of the file (ignoring whitespace) #}{% set previous_changelog_bottom = changelog_parts[1] | trim %}{% if previous_changelog_bottom | 
length > 0 %}{{ "\n%s\n" | format(previous_changelog_bottom) }}{% endif %}{% endif %} changes.md.j2000066400000000000000000000126621506116242600350770ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/data/templates/conventional/md/.components{% from 'macros.md.j2' import apply_alphabetical_ordering_by_brk_descriptions %}{% from 'macros.md.j2' import apply_alphabetical_ordering_by_descriptions %}{% from 'macros.md.j2' import apply_alphabetical_ordering_by_release_notices %}{% from 'macros.md.j2' import format_breaking_changes_description, format_commit_summary_line %}{% from 'macros.md.j2' import format_release_notice %}{# EXAMPLE: ### Features - Add new feature ([#10](https://domain.com/namespace/repo/pull/10), [`abcdef0`](https://domain.com/namespace/repo/commit/HASH)) - **scope**: Add new feature ([`abcdef0`](https://domain.com/namespace/repo/commit/HASH)) ### Bug Fixes - Fix bug ([#11](https://domain.com/namespace/repo/pull/11), [`abcdef1`](https://domain.com/namespace/repo/commit/HASH)) ### Breaking Changes - With the change _____, the change causes ___ effect. Ultimately, this section it is a more detailed description of the breaking change. With an optional scope prefix like the commit messages above. - **scope**: this breaking change has a scope to identify the part of the code that this breaking change applies to for better context. ### Additional Release Information - This is a release note that provides additional information about the release that is not a breaking change or a feature/bug fix. - **scope**: this release note has a scope to identify the part of the code that this release note applies to for better context. 
#}{% set max_line_width = max_line_width | default(100) %}{% set hanging_indent = hanging_indent | default(2) %}{# #}{% for type_, commits in commit_objects if type_ != "unknown" %}{# PREPROCESS COMMITS (order by description & format description line) #}{% set ns = namespace(commits=commits) %}{% set _ = apply_alphabetical_ordering_by_descriptions(ns) %}{# #}{% set commit_descriptions = [] %}{# #}{% for commit in ns.commits %}{# # Add reference links to the commit summary line #}{% set description = "- %s" | format(format_commit_summary_line(commit)) %}{% set description = description | autofit_text_width(max_line_width, hanging_indent) %}{% set _ = commit_descriptions.append(description) %}{% endfor %}{# # # PRINT SECTION (header & commits) #}{% if commit_descriptions | length > 0 %}{{ "\n" }}{{ "### %s\n" | format(type_ | title) }}{{ "\n" }}{{ "%s\n" | format(commit_descriptions | unique | join("\n\n")) }}{% endif %}{% endfor %}{# # Determine if there are any breaking change commits by filtering the list by breaking descriptions # commit_objects is a list of tuples [("Features", [ParsedCommit(), ...]), ("Bug Fixes", [ParsedCommit(), ...])] # HOW: Filter out breaking change commits that have no breaking descriptions # 1. Re-map the list to only the list of commits under the breaking category from the list of tuples # 2. Peel off the outer list to get a list of ParsedCommit objects # 3. 
Filter the list of ParsedCommits to only those with a breaking description #}{% set breaking_commits = commit_objects | map(attribute="1.0") %}{% set breaking_commits = breaking_commits | rejectattr("error", "defined") | selectattr("breaking_descriptions.0") | list %}{# #}{% if breaking_commits | length > 0 %}{# PREPROCESS COMMITS #}{% set brk_ns = namespace(commits=breaking_commits) %}{% set _ = apply_alphabetical_ordering_by_brk_descriptions(brk_ns) %}{# #}{% set brking_descriptions = [] %}{# #}{% for commit in brk_ns.commits %}{% set full_description = "- %s" | format( format_breaking_changes_description(commit).split("\n\n") | join("\n\n- ") ) %}{% set _ = brking_descriptions.append( full_description | autofit_text_width(max_line_width, hanging_indent) ) %}{% endfor %}{# # # PRINT BREAKING CHANGE DESCRIPTIONS (header & descriptions) #}{{ "\n" }}{{ "### Breaking Changes\n" }}{{ "\n%s\n" | format(brking_descriptions | unique | join("\n\n")) }}{# #}{% endif %}{# # Determine if there are any commits with release notice information by filtering the list by release_notices # commit_objects is a list of tuples [("Features", [ParsedCommit(), ...]), ("Bug Fixes", [ParsedCommit(), ...])] # HOW: Filter out commits that have no release notices # 1. Re-map the list to only the list of commits from the list of tuples # 2. Peel off the outer list to get a list of ParsedCommit objects # 3. 
Filter the list of ParsedCommits to only those with a release notice #}{% set notice_commits = commit_objects | map(attribute="1.0") %}{% set notice_commits = notice_commits | rejectattr("error", "defined") | selectattr("release_notices.0") | list %}{# #}{% if notice_commits | length > 0 %}{# PREPROCESS COMMITS #}{% set notice_ns = namespace(commits=notice_commits) %}{% set _ = apply_alphabetical_ordering_by_release_notices(notice_ns) %}{# #}{% set release_notices = [] %}{# #}{% for commit in notice_ns.commits %}{% set full_description = "- %s" | format( format_release_notice(commit).split("\n\n") | join("\n\n- ") ) %}{% set _ = release_notices.append( full_description | autofit_text_width(max_line_width, hanging_indent) ) %}{% endfor %}{# # # PRINT RELEASE NOTICE INFORMATION (header & descriptions) #}{{ "\n" }}{{ "### Additional Release Information\n" }}{{ "\n%s\n" | format(release_notices | unique | join("\n\n")) }}{# #}{% endif %} first_release.md.j2000066400000000000000000000006451506116242600363140ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/data/templates/conventional/md/.components{# EXAMPLE: ## vX.X.X (YYYY-MMM-DD) _This release is published under the MIT License._ # Release Notes Only - Initial Release #}{{ "## %s (%s)\n" | format( release.version.as_semver_tag(), release.tagged_date.strftime("%Y-%m-%d") ) }}{% if license_name is defined and license_name %}{{ "\n_This release is published under the %s License._\n" | format(license_name) }}{% endif %} - Initial Release macros.md.j2000066400000000000000000000146061506116242600347530ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/data/templates/conventional/md/.components{# MACRO: format a inline link reference in Markdown #}{% macro format_link(link, label) %}{{ "[%s](%s)" | format(label, link) }}{% endmacro %} {# MACRO: Capitalize the first letter of a string only #}{% macro capitalize_first_letter_only(sentence) %}{{ (sentence[0] | upper) 
~ sentence[1:] }}{% endmacro %} {# MACRO: commit message links or PR/MR links of commit #}{% macro commit_msg_links(commit) %}{% if commit.error is undefined %}{# # # Initialize variables #}{% set link_references = [] %}{% set summary_line = capitalize_first_letter_only( commit.descriptions[0] | safe ) %}{# #}{% if commit.linked_merge_request != "" %}{# # Add PR references with a link to the PR #}{% set _ = link_references.append( format_link( commit.linked_merge_request | pull_request_url, commit.linked_merge_request ) ) %}{% endif %}{# # # DEFAULT: Always include the commit hash as a link #}{% set _ = link_references.append( format_link( commit.hexsha | commit_hash_url, "`%s`" | format(commit.short_hash) ) ) %}{# #}{% set formatted_links = "" %}{% if link_references | length > 0 %}{% set formatted_links = " (%s)" | format(link_references | join(", ")) %}{% endif %}{# # Return the modified summary_line #}{{ summary_line ~ formatted_links }}{% endif %}{% endmacro %} {# MACRO: format commit summary line #}{% macro format_commit_summary_line(commit) %}{# # Check for Parsing Error #}{% if commit.error is undefined %}{# # # Add any message links to the commit summary line #}{% set summary_line = commit_msg_links(commit) %}{# #}{% if commit.scope %}{% set summary_line = "**%s**: %s" | format(commit.scope, summary_line) %}{% endif %}{# # # Return the modified summary_line #}{{ summary_line }}{# #}{% else %}{# # Return the first line of the commit if there was a Parsing Error #}{{ (commit.commit.message | string).split("\n", maxsplit=1)[0] }}{% endif %}{% endmacro %} {# MACRO: format a commit descriptions list by: - Capitalizing the first line of the description - Adding an optional scope prefix - Joining the rest of the descriptions with a double newline #}{% macro format_attr_paragraphs(commit, attribute) %}{# NOTE: requires namespace because of the way Jinja2 handles variable scoping with loops #}{% set ns = namespace(full_description="") %}{# #}{% if commit.error is 
undefined %}{% for paragraph in commit | attr(attribute) %}{% if paragraph | trim | length > 0 %}{# #}{% set ns.full_description = [ ns.full_description, capitalize_first_letter_only(paragraph) | trim | safe, ] | join("\n\n") %}{# #}{% endif %}{% endfor %}{# #}{% set ns.full_description = ns.full_description | trim %}{# #}{% if commit.scope %}{% set ns.full_description = "**%s**: %s" | format( commit.scope, ns.full_description ) %}{% endif %}{% endif %}{# #}{{ ns.full_description }}{% endmacro %} {# MACRO: format the breaking changes description by: - Capitalizing the description - Adding an optional scope prefix #}{% macro format_breaking_changes_description(commit) %}{{ format_attr_paragraphs(commit, 'breaking_descriptions') }}{% endmacro %} {# MACRO: format the release notice by: - Capitalizing the description - Adding an optional scope prefix #}{% macro format_release_notice(commit) %}{{ format_attr_paragraphs(commit, "release_notices") }}{% endmacro %} {# MACRO: order commits alphabetically by scope and attribute - Commits are sorted based on scope and then the attribute alphabetically - Commits without scope are placed first and sorted alphabetically by the attribute - parameter: ns (namespace) object with a commits list - parameter: attr (string) attribute to sort by - returns None but modifies the ns.commits list in place #}{% macro order_commits_alphabetically_by_scope_and_attr(ns, attr) %}{% set ordered_commits = [] %}{# # # Eliminate any ParseError commits from input set #}{% set filtered_commits = ns.commits | rejectattr("error", "defined") | list %}{# # # grab all commits with no scope and sort alphabetically by attr #}{% for commit in filtered_commits | rejectattr("scope") | sort(attribute=attr) %}{% set _ = ordered_commits.append(commit) %}{% endfor %}{# # # grab all commits with a scope and sort alphabetically by the scope and then attr #}{% for commit in filtered_commits | selectattr("scope") | sort(attribute=(['scope', attr] | join(","))) %}{% set 
_ = ordered_commits.append(commit) %}{% endfor %}{# # # Return the ordered commits #}{% set ns.commits = ordered_commits %}{% endmacro %} {# MACRO: apply smart ordering of commits objects based on alphabetized summaries and then scopes - Commits are sorted based on the commit type and the commit message - Commits are grouped by the commit type - parameter: ns (namespace) object with a commits list - returns None but modifies the ns.commits list in place #}{% macro apply_alphabetical_ordering_by_descriptions(ns) %}{% set _ = order_commits_alphabetically_by_scope_and_attr(ns, 'descriptions.0') %}{% endmacro %} {# MACRO: apply smart ordering of commits objects based on alphabetized breaking changes and then scopes - Commits are sorted based on the commit type and the commit message - Commits are grouped by the commit type - parameter: ns (namespace) object with a commits list - returns None but modifies the ns.commits list in place #}{% macro apply_alphabetical_ordering_by_brk_descriptions(ns) %}{% set _ = order_commits_alphabetically_by_scope_and_attr(ns, 'breaking_descriptions.0') %}{% endmacro %} {# MACRO: apply smart ordering of commits objects based on alphabetized release notices and then scopes - Commits are sorted based on the commit type and the commit message - Commits are grouped by the commit type - parameter: ns (namespace) object with a commits list - returns None but modifies the ns.commits list in place #}{% macro apply_alphabetical_ordering_by_release_notices(ns) %}{% set _ = order_commits_alphabetically_by_scope_and_attr(ns, 'release_notices.0') %}{% endmacro %} unreleased_changes.md.j2000066400000000000000000000002611506116242600372760ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/data/templates/conventional/md/.components{% if unreleased_commits | length > 0 %}{{ "\n## Unreleased\n" }}{% set commit_objects = unreleased_commits %}{% include "changes.md.j2" -%}{{ "\n" }}{% endif %} 
versioned_changes.md.j2000066400000000000000000000007661506116242600371570ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/data/templates/conventional/md/.components{# EXAMPLE: ## vX.X.X (YYYY-MMM-DD) _This release is published under the MIT License._ # Release Notes Only {{ change_sections }} #}{{ "## %s (%s)\n" | format( release.version.as_semver_tag(), release.tagged_date.strftime("%Y-%m-%d") ) }}{% if license_name is defined and license_name %}{{ "\n_This release is published under the %s License._\n" | format(license_name) }}{% endif %}{# #}{% set commit_objects = release["elements"] | dictsort %}{% include "changes.md.j2" -%} .release_notes.md.j2000066400000000000000000000047201506116242600341260ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/data/templates/conventional/md{# EXAMPLE: ## v1.0.0 (2020-01-01) _This release is published under the MIT License._ ### Features - Add new feature ([#10](https://domain.com/namespace/repo/pull/10), [`abcdef0`](https://domain.com/namespace/repo/commit/HASH)) - **scope**: Add new feature ([`abcdef0`](https://domain.com/namespace/repo/commit/HASH)) ### Bug Fixes - Fix bug (#11, [`abcdef1`](https://domain.com/namespace/repo/commit/HASH)) ### Breaking Changes - With the change _____, the change causes ___ effect. Ultimately, this section it is a more detailed description of the breaking change. With an optional scope prefix like the commit messages above. - **scope**: this breaking change has a scope to identify the part of the code that this breaking change applies to for better context. ### Additional Release Information - This is a release note that provides additional information about the release that is not a breaking change or a feature/bug fix. - **scope**: this release note has a scope to identify the part of the code that this release note applies to for better context. 
--- **Detailed Changes**: [vX.X.X...vX.X.X](https://domain.com/namespace/repo/compare/vX.X.X...vX.X.X) #}{# # Set line width to 1000 to avoid wrapping as GitHub will handle it #}{% set max_line_width = max_line_width | default(1000) %}{% set hanging_indent = hanging_indent | default(2) %}{% set license_name = license_name | default("", True) %}{% set releases = context.history.released.values() | list %}{% set curr_release_index = releases.index(release) %}{# #}{% if mask_initial_release and curr_release_index == releases | length - 1 %}{# # On a first release, generate our special message #}{% include ".components/first_release.md.j2" %}{% else %}{# # Not the first release so generate notes normally #}{% include ".components/versioned_changes.md.j2" -%}{# #}{% set prev_release_index = curr_release_index + 1 %}{# #}{% if 'compare_url' is filter and prev_release_index < releases | length %}{% set prev_version_tag = releases[prev_release_index].version.as_tag() %}{% set new_version_tag = release.version.as_tag() %}{% set version_compare_url = prev_version_tag | compare_url(new_version_tag) %}{% set detailed_changes_link = '[{}...{}]({})'.format( prev_version_tag, new_version_tag, version_compare_url ) %}{{ "\n" }}{{ "---\n" }}{{ "\n" }}{{ "**Detailed Changes**: %s" | format(detailed_changes_link) }}{% endif %}{% endif %} python-semantic-release-10.4.1/src/semantic_release/data/templates/conventional/md/CHANGELOG.md.j2000066400000000000000000000013311506116242600325610ustar00rootroot00000000000000{# This changelog template controls which changelog creation occurs based on which mode is provided. 
Modes: - init: Initialize a full changelog from scratch - update: Insert new version details where the placeholder exists in the current changelog #}{% set insertion_flag = ctx.changelog_insertion_flag %}{% set unreleased_commits = ctx.history.unreleased | dictsort %}{% set releases = ctx.history.released.values() | list %}{# #}{% if ctx.changelog_mode == "init" %}{% include ".components/changelog_init.md.j2" %}{# #}{% elif ctx.changelog_mode == "update" %}{% set prev_changelog_file = ctx.prev_changelog_file %}{% include ".components/changelog_update.md.j2" %}{# #}{% endif %} python-semantic-release-10.4.1/src/semantic_release/data/templates/conventional/rst/000077500000000000000000000000001506116242600306505ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/data/templates/conventional/rst/.components/000077500000000000000000000000001506116242600331135ustar00rootroot00000000000000changelog_header.rst.j2000066400000000000000000000003101506116242600373310ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/data/templates/conventional/rst/.components.. _changelog: ========= CHANGELOG ========= {% if ctx.changelog_mode == "update" %}{# # IMPORTANT: add insertion flag for next version update #}{{ insertion_flag ~ "\n" }}{% endif %} changelog_init.rst.j2000066400000000000000000000015421506116242600370540ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/data/templates/conventional/rst/.components{# This changelog template initializes a full changelog for the project, it follows the following logic: 1. Header 2. Any Unreleased Details (uncommon) 3. all previous releases except the very first release 4. the first release #}{# # Header #}{% include "changelog_header.rst.j2" -%}{# # Any Unreleased Details (uncommon) #}{% include "unreleased_changes.rst.j2" -%}{# # Since this is initialization, we are generating all the previous # release notes per version. 
The very first release notes is specialized #}{% if releases | length > 0 %}{% for release in releases %}{{ "\n" }}{% if loop.last and ctx.mask_initial_release %}{%- include "first_release.rst.j2" -%}{% else %}{%- include "versioned_changes.rst.j2" -%}{% endif %}{{ "\n" }}{% endfor %}{% endif %} changelog_update.rst.j2000066400000000000000000000050541506116242600373750ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/data/templates/conventional/rst/.components{# This Update changelog template uses the following logic: 1. Read previous changelog file (ex. project_root/CHANGELOG.md) 2. Split on insertion flag (ex. ) 3. Print top half of previous changelog 3. New Changes (unreleased commits & newly released) 4. Print bottom half of previous changelog Note: if a previous file was not found, it does not write anything at the bottom but render does NOT fail #}{% set prev_changelog_contents = prev_changelog_file | read_file | safe %}{% set changelog_parts = prev_changelog_contents.split(insertion_flag, maxsplit=1) %}{# #}{% if changelog_parts | length < 2 %}{# # insertion flag was not found, check if the file was empty or did not exist #}{% if prev_changelog_contents | length > 0 %}{# # File has content but no insertion flag, therefore, file will not be updated #}{{ changelog_parts[0] }}{% else %}{# # File was empty or did not exist, therefore, it will be created from scratch #}{% include "changelog_init.rst.j2" %}{% endif %}{% else %}{# # Previous Changelog Header # - Depending if there is header content, then it will separate the insertion flag # with a newline from header content, otherwise it will just print the insertion flag #}{% set prev_changelog_top = changelog_parts[0] | trim %}{% if prev_changelog_top | length > 0 %}{{ "%s\n\n%s\n" | format(prev_changelog_top, insertion_flag | trim) }}{% else %}{{ "%s\n" | format(insertion_flag | trim) }}{% endif %}{# # Any Unreleased Details (uncommon) #}{% include "unreleased_changes.rst.j2" 
-%}{# #}{% if releases | length > 0 %}{# # Latest Release Details #}{% set release = releases[0] %}{# #}{% if releases | length == 1 and ctx.mask_initial_release %}{# # First Release detected #}{{ "\n" }}{%- include "first_release.rst.j2" -%}{{ "\n" }}{# #}{% elif release.version.as_semver_tag() ~ " (" not in changelog_parts[1] %}{# # The release version is not already in the changelog so we add it #}{{ "\n" }}{%- include "versioned_changes.rst.j2" -%}{{ "\n" }}{# #}{% endif %}{% endif %}{# # Previous Changelog Footer # - skips printing footer if empty, which happens when the insertion_flag # was at the end of the file (ignoring whitespace) #}{% set previous_changelog_bottom = changelog_parts[1] | trim %}{% if previous_changelog_bottom | length > 0 %}{{ "\n%s\n" | format(previous_changelog_bottom) }}{% endif %}{% endif %} changes.rst.j2000066400000000000000000000154121506116242600355130ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/data/templates/conventional/rst/.components{% from 'macros.rst.j2' import apply_alphabetical_ordering_by_brk_descriptions %}{% from 'macros.rst.j2' import apply_alphabetical_ordering_by_descriptions %}{% from 'macros.rst.j2' import apply_alphabetical_ordering_by_release_notices %}{% from 'macros.rst.j2' import extract_pr_link_reference, format_breaking_changes_description %}{% from 'macros.rst.j2' import format_commit_summary_line, format_link_reference %}{% from 'macros.rst.j2' import format_release_notice, generate_heading_underline %}{# Features -------- * Add new feature (`#10`_, `8a7b8ec`_) * **scope**: Add another feature (`abcdef0`_) Bug Fixes --------- * Fix bug (`#11`_, `8a7b8ec`_) Breaking Changes ---------------- * With the change _____, the change causes ___ effect. Ultimately, this section it is a more detailed description of the breaking change. With an optional scope prefix like the commit messages above. 
* **scope**: this breaking change has a scope to identify the part of the code that this breaking change applies to for better context. Additional Release Information ------------------------------ * This is a release note that provides additional information about the release that is not a breaking change or a feature/bug fix. * **scope**: this release note has a scope to identify the part of the code that this release note applies to for better context. .. _#10: https://domain.com/namespace/repo/pull/10 .. _#11: https://domain.com/namespace/repo/pull/11 .. _8a7B8ec: https://domain.com/owner/repo/commit/8a7b8ec .. _abcdef0: https://domain.com/owner/repo/commit/abcdef0 #}{% set max_line_width = max_line_width | default(100) %}{% set hanging_indent = hanging_indent | default(2) %}{# #}{% set post_paragraph_links = [] %}{# #}{% for type_, commits in commit_objects if type_ != "unknown" %}{# # PREPARE SECTION HEADER #}{% set section_header = "%s" | format(type_ | title) %}{# # # PREPROCESS COMMITS #}{% set ns = namespace(commits=commits) %}{% set _ = apply_alphabetical_ordering_by_descriptions(ns) %}{# #}{% set commit_descriptions = [] %}{# #}{% for commit in ns.commits %}{# # Extract PR/MR reference if it exists and store it for later #}{% set pr_link_reference = extract_pr_link_reference(commit) | default("", true) %}{% if pr_link_reference != "" %}{% set _ = post_paragraph_links.append(pr_link_reference) %}{% endif %}{# # # Always generate a commit hash reference link and store it for later #}{% set commit_hash_link_reference = format_link_reference( commit.hexsha | commit_hash_url, commit.short_hash ) %}{% set _ = post_paragraph_links.append(commit_hash_link_reference) %}{# # Generate the commit summary line and format it for RST # autoformatting the reference links #}{% set description = "* %s" | format(format_commit_summary_line(commit)) %}{% set description = description | convert_md_to_rst %}{% set description = description | autofit_text_width(max_line_width, 
hanging_indent) %}{% set _ = commit_descriptions.append(description) %}{% endfor %}{# # # PRINT SECTION (Header & Commits) #}{% if commit_descriptions | length > 0 %}{{ "\n" }}{{ section_header ~ "\n" }}{{ generate_heading_underline(section_header, '-') }}{{ "\n" }}{{ "\n%s\n" | format(commit_descriptions | unique | join("\n\n")) }}{% endif %}{% endfor %}{# # Determine if there are any breaking change commits by filtering the list by breaking descriptions # commit_objects is a list of tuples [("Features", [ParsedCommit(), ...]), ("Bug Fixes", [ParsedCommit(), ...])] # HOW: Filter out breaking change commits that have no breaking descriptions # 1. Re-map the list to only the list of commits under the breaking category from the list of tuples # 2. Peel off the outer list to get a list of ParsedCommit objects # 3. Filter the list of ParsedCommits to only those with a breaking description #}{% set breaking_commits = commit_objects | map(attribute="1.0") %}{% set breaking_commits = breaking_commits | rejectattr("error", "defined") | selectattr("breaking_descriptions.0") | list %}{# #}{% if breaking_commits | length > 0 %}{# # PREPROCESS COMMITS #}{% set brk_ns = namespace(commits=breaking_commits) %}{% set _ = apply_alphabetical_ordering_by_brk_descriptions(brk_ns) %}{# #}{% set brking_descriptions = [] %}{# #}{% for commit in brk_ns.commits %}{% set full_description = "* %s" | format( format_breaking_changes_description(commit).split("\n\n") | join("\n\n* ") ) %}{% set _ = brking_descriptions.append( full_description | convert_md_to_rst | autofit_text_width(max_line_width, hanging_indent) ) %}{% endfor %}{# # # PRINT BREAKING CHANGE DESCRIPTIONS (header & descriptions) #}{{ "\n" }}{{ "Breaking Changes\n" }}{{ '----------------\n' }}{{ "\n%s\n" | format(brking_descriptions | unique | join("\n\n")) }}{# #}{% endif %}{# # Determine if there are any commits with release notice information by filtering the list by release_notices # commit_objects is a list of tuples 
[("Features", [ParsedCommit(), ...]), ("Bug Fixes", [ParsedCommit(), ...])] # HOW: Filter out commits that have no release notices # 1. Re-map the list to only the list of commits from the list of tuples # 2. Peel off the outer list to get a list of ParsedCommit objects # 3. Filter the list of ParsedCommits to only those with a release notice #}{% set notice_commits = commit_objects | map(attribute="1.0") %}{% set notice_commits = notice_commits | rejectattr("error", "defined") | selectattr("release_notices.0") | list %}{# #}{% if notice_commits | length > 0 %}{# PREPROCESS COMMITS #}{% set notice_ns = namespace(commits=notice_commits) %}{% set _ = apply_alphabetical_ordering_by_release_notices(notice_ns) %}{# #}{% set release_notices = [] %}{# #}{% for commit in notice_ns.commits %}{% set full_description = "* %s" | format( format_release_notice(commit).split("\n\n") | join("\n\n* ") ) %}{% set _ = release_notices.append( full_description | convert_md_to_rst | autofit_text_width(max_line_width, hanging_indent) ) %}{% endfor %}{# # # PRINT RELEASE NOTICE INFORMATION (header & descriptions) #}{{ "\n" }}{{ "Additional Release Information\n" }}{{ "------------------------------\n" }}{{ "\n%s\n" | format(release_notices | unique | join("\n\n")) }}{# #}{% endif %}{# # # # PRINT POST PARAGRAPH LINKS #}{% if post_paragraph_links | length > 0 %}{# # Print out any PR/MR or Issue URL references that were found in the commit messages #}{{ "\n%s\n" | format(post_paragraph_links | unique | sort | join("\n")) }}{% endif %} first_release.rst.j2000066400000000000000000000007161506116242600367330ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/data/templates/conventional/rst/.components{% from "macros.rst.j2" import generate_heading_underline %}{# .. 
_changelog-vX.X.X: vX.X.X (YYYY-MMM-DD) ==================== * Initial Release #}{% set version_header = "%s (%s)" | format( release.version.as_semver_tag(), release.tagged_date.strftime("%Y-%m-%d") ) %} {{- ".. _changelog-%s:" | format(release.version.as_semver_tag()) }} {{ version_header }} {{ generate_heading_underline(version_header, "=") }} * Initial Release macros.rst.j2000066400000000000000000000162031506116242600353660ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/data/templates/conventional/rst/.components{# MACRO: Capitalize the first letter of a string only #}{% macro capitalize_first_letter_only(sentence) %}{{ (sentence[0] | upper) ~ sentence[1:] }}{% endmacro %} {# MACRO: format a post-paragraph link reference in RST #}{% macro format_link_reference(link, label) %}{{ ".. _%s: %s" | format(label, link) }}{% endmacro %} {# MACRO: generate a heading underline that matches the exact length of the header #} {% macro generate_heading_underline(header, underline_char) %}{% set header_underline = [] %}{% for _ in header %}{% set __ = header_underline.append(underline_char) %}{% endfor %}{# # Print out the header underline #}{{ header_underline | join }}{% endmacro %} {# MACRO: formats a commit message for a non-inline RST link for a commit hash and/or PR/MR #}{% macro commit_msg_links(commit) %}{% if commit.error is undefined %}{# # # Initialize variables #}{% set link_references = [] %}{% set summary_line = capitalize_first_letter_only( commit.descriptions[0] | safe ) %}{# #}{% if commit.linked_merge_request != "" %}{# # Add PR/MR references with a link to the PR/MR #}{% set _ = link_references.append("`%s`_" | format(commit.linked_merge_request)) %}{% endif %}{# # DEFAULT: Always include the commit hash as a link #}{% set _ = link_references.append("`%s`_" | format(commit.short_hash)) %}{# #}{% set formatted_links = "" %}{% if link_references | length > 0 %}{% set formatted_links = " (%s)" | format(link_references | join(", 
")) %}{% endif %}{# # Return the modified summary_line #}{{ summary_line ~ formatted_links }}{% endif %}{% endmacro %} {# MACRO: format commit summary line #}{% macro format_commit_summary_line(commit) %}{# # Check for Parsing Error #}{% if commit.error is undefined %}{# # # Add any message links to the commit summary line #}{% set summary_line = commit_msg_links(commit) %}{# #}{% if commit.scope %}{% set summary_line = "**%s**: %s" | format(commit.scope, summary_line) %}{% endif %}{# # # Return the modified summary_line #}{{ summary_line }}{# #}{% else %}{# # Return the first line of the commit if there was a Parsing Error #}{{ (commit.commit.message | string).split("\n", maxsplit=1)[0] }}{% endif %}{% endmacro %} {# MACRO: Create & return an non-inline RST link from a commit message - Returns empty string if no PR/MR identifier is found #}{% macro extract_pr_link_reference(commit) %}{% if commit.error is undefined %}{% set summary_line = commit.descriptions[0] %}{# #}{% if commit.linked_merge_request != "" %}{# # Create a PR/MR reference url #}{{ format_link_reference( commit.linked_merge_request | pull_request_url, commit.linked_merge_request, ) }}{% endif %}{% endif %}{% endmacro %} {# MACRO: format a commit descriptions list by: - Capitalizing the first line of the description - Adding an optional scope prefix - Joining the rest of the descriptions with a double newline #}{% macro format_attr_paragraphs(commit, attribute) %}{# NOTE: requires namespace because of the way Jinja2 handles variable scoping with loops #}{% set ns = namespace(full_description="") %}{# #}{% if commit.error is undefined %}{% for paragraph in commit | attr(attribute) %}{% if paragraph | trim | length > 0 %}{# #}{% set ns.full_description = [ ns.full_description, capitalize_first_letter_only(paragraph) | trim | safe, ] | join("\n\n") %}{# #}{% endif %}{% endfor %}{# #}{% set ns.full_description = ns.full_description | trim %}{# #}{% if commit.scope %}{% set ns.full_description = "**%s**: 
%s" | format( commit.scope, ns.full_description ) %}{% endif %}{% endif %}{# #}{{ ns.full_description }}{% endmacro %} {# MACRO: format the breaking changes description by: - Capitalizing the description - Adding an optional scope prefix #}{% macro format_breaking_changes_description(commit) %}{{ format_attr_paragraphs(commit, 'breaking_descriptions') }}{% endmacro %} {# MACRO: format the release notice by: - Capitalizing the description - Adding an optional scope prefix #}{% macro format_release_notice(commit) %}{{ format_attr_paragraphs(commit, "release_notices") }}{% endmacro %} {# MACRO: order commits alphabetically by scope and attribute - Commits are sorted based on scope and then the attribute alphabetically - Commits without scope are placed first and sorted alphabetically by the attribute - parameter: ns (namespace) object with a commits list - parameter: attr (string) attribute to sort by - returns None but modifies the ns.commits list in place #}{% macro order_commits_alphabetically_by_scope_and_attr(ns, attr) %}{% set ordered_commits = [] %}{# # # Eliminate any ParseError commits from input set #}{% set filtered_commits = ns.commits | rejectattr("error", "defined") | list %}{# # # grab all commits with no scope and sort alphabetically by attr #}{% for commit in filtered_commits | rejectattr("scope") | sort(attribute=attr) %}{% set _ = ordered_commits.append(commit) %}{% endfor %}{# # # grab all commits with a scope and sort alphabetically by the scope and then attr #}{% for commit in filtered_commits | selectattr("scope") | sort(attribute=(['scope', attr] | join(","))) %}{% set _ = ordered_commits.append(commit) %}{% endfor %}{# # # Return the ordered commits #}{% set ns.commits = ordered_commits %}{% endmacro %} {# MACRO: apply smart ordering of commits objects based on alphabetized summaries and then scopes - Commits are sorted based on the commit type and the commit message - Commits are grouped by the commit type - parameter: ns (namespace) object 
with a commits list - returns None but modifies the ns.commits list in place #}{% macro apply_alphabetical_ordering_by_descriptions(ns) %}{% set _ = order_commits_alphabetically_by_scope_and_attr(ns, 'descriptions.0') %}{% endmacro %} {# MACRO: apply smart ordering of commits objects based on alphabetized breaking changes and then scopes - Commits are sorted based on the commit type and the commit message - Commits are grouped by the commit type - parameter: ns (namespace) object with a commits list - returns None but modifies the ns.commits list in place #}{% macro apply_alphabetical_ordering_by_brk_descriptions(ns) %}{% set _ = order_commits_alphabetically_by_scope_and_attr(ns, 'breaking_descriptions.0') %}{% endmacro %} {# MACRO: apply smart ordering of commits objects based on alphabetized release notices and then scopes - Commits are sorted based on the commit type and the commit message - Commits are grouped by the commit type - parameter: ns (namespace) object with a commits list - returns None but modifies the ns.commits list in place #}{% macro apply_alphabetical_ordering_by_release_notices(ns) %}{% set _ = order_commits_alphabetically_by_scope_and_attr(ns, 'release_notices.0') %}{% endmacro %} unreleased_changes.rst.j2000066400000000000000000000003121506116242600377130ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/data/templates/conventional/rst/.components{% if unreleased_commits | length > 0 %} .. _changelog-unreleased: Unreleased ========== {% set commit_objects = unreleased_commits %}{% include "changes.rst.j2" -%}{{ "\n" }}{% endif %} versioned_changes.rst.j2000066400000000000000000000010511506116242600375630ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/data/templates/conventional/rst/.components{% from 'macros.rst.j2' import generate_heading_underline %}{# .. 
_changelog-X.X.X: vX.X.X (YYYY-MMM-DD) ==================== {{ change_sections }} #}{% set version_header = "%s (%s)" | format( release.version.as_semver_tag(), release.tagged_date.strftime("%Y-%m-%d") ) %}{# #}{{ ".. _changelog-%s:" | format(release.version.as_semver_tag()) }} {{ version_header }} {{ generate_heading_underline(version_header, "=") }} {# #}{% set commit_objects = release["elements"] | dictsort %}{% include "changes.rst.j2" -%} python-semantic-release-10.4.1/src/semantic_release/data/templates/conventional/rst/CHANGELOG.rst.j2000066400000000000000000000013331506116242600332030ustar00rootroot00000000000000{# This changelog template controls which changelog creation occurs based on which mode is provided. Modes: - init: Initialize a full changelog from scratch - update: Insert new version details where the placeholder exists in the current changelog #}{% set insertion_flag = ctx.changelog_insertion_flag %}{% set unreleased_commits = ctx.history.unreleased | dictsort %}{% set releases = ctx.history.released.values() | list %}{# #}{% if ctx.changelog_mode == "init" %}{% include ".components/changelog_init.rst.j2" %}{# #}{% elif ctx.changelog_mode == "update" %}{% set prev_changelog_file = ctx.prev_changelog_file %}{% include ".components/changelog_update.rst.j2" %}{# #}{% endif %} python-semantic-release-10.4.1/src/semantic_release/enums.py000066400000000000000000000033201506116242600241310ustar00rootroot00000000000000from __future__ import annotations import logging from enum import IntEnum, unique @unique class LevelBump(IntEnum): """ IntEnum representing valid types of bumps for a version. We use an IntEnum to enable ordering of levels. """ NO_RELEASE = 0 PRERELEASE_REVISION = 1 PATCH = 2 MINOR = 3 MAJOR = 4 def __str__(self) -> str: """ Return the level name rather than 'LevelBump.' E.g. 
>>> str(LevelBump.NO_RELEASE) 'no_release' >>> str(LevelBump.MAJOR) 'major' """ return self.name.lower() @classmethod def from_string(cls, val: str) -> LevelBump: """ Get the level from string representation. For backwards-compatibility, dashes are replaced with underscores so that: >>> LevelBump.from_string("no-release") == LevelBump.NO_RELEASE Equally, >>> LevelBump.from_string("minor") == LevelBump.MINOR """ return cls[val.upper().replace("-", "_")] class SemanticReleaseLogLevels(IntEnum): """IntEnum representing the log levels used by semantic-release.""" FATAL = logging.FATAL CRITICAL = logging.CRITICAL ERROR = logging.ERROR WARNING = logging.WARNING INFO = logging.INFO DEBUG = logging.DEBUG SILLY = 5 def __str__(self) -> str: """ Return the level name rather than 'SemanticReleaseLogLevels.' E.g. >>> str(SemanticReleaseLogLevels.DEBUG) 'DEBUG' >>> str(SemanticReleaseLogLevels.CRITICAL) 'CRITICAL' """ return self.name.upper() logging.addLevelName( SemanticReleaseLogLevels.SILLY, str(SemanticReleaseLogLevels.SILLY), ) python-semantic-release-10.4.1/src/semantic_release/errors.py000066400000000000000000000055301506116242600243230ustar00rootroot00000000000000"""Custom Errors""" class SemanticReleaseBaseError(Exception): """ Base Exception from which all other custom Exceptions defined in semantic_release inherit """ class InternalError(SemanticReleaseBaseError): """Raised when an internal error occurs, which should never happen""" class InvalidConfiguration(SemanticReleaseBaseError): """Raised when configuration is deemed invalid""" class InvalidParserOptions(InvalidConfiguration): """Raised when the parser options are invalid""" class MissingGitRemote(SemanticReleaseBaseError): """Raised when repository is missing the configured remote origin or upstream""" class InvalidVersion(ValueError, SemanticReleaseBaseError): """ Raised when Version.parse attempts to parse a string containing an invalid version. 
""" class NotAReleaseBranch(InvalidConfiguration): """ Raised when semantic_release is invoked on a branch which isn't configured for releases """ class DetachedHeadGitError(SemanticReleaseBaseError): """Raised when the git repository is in a detached HEAD state""" class CommitParseError(SemanticReleaseBaseError): """ Raised when a commit cannot be parsed by a commit parser. Custom commit parsers should also raise this Exception """ class MissingMergeBaseError(SemanticReleaseBaseError): """ Raised when the merge base cannot be found with the current history. Generally because of a shallow git clone. """ class UnexpectedResponse(SemanticReleaseBaseError): """ Raised when an HTTP response cannot be parsed properly or the expected structure is not found. """ class IncompleteReleaseError(SemanticReleaseBaseError): """ Raised when there is a failure amongst one of the api requests when creating a release on a remote hvcs. """ class AssetUploadError(SemanticReleaseBaseError): """ Raised when there is a failure uploading an asset to a remote hvcs's release artifact storage. """ class ParserLoadError(SemanticReleaseBaseError): """ Raised when there is a failure to find, load, or instantiate a custom parser definition. 
""" class BuildDistributionsError(SemanticReleaseBaseError): """Raised when there is a failure to build the distribution files.""" class GitAddError(SemanticReleaseBaseError): """Raised when there is a failure to add files to the git index.""" class GitCommitError(SemanticReleaseBaseError): """Raised when there is a failure to commit the changes.""" class GitCommitEmptyIndexError(SemanticReleaseBaseError): """Raised when there is an attempt to commit an empty index.""" class GitTagError(SemanticReleaseBaseError): """Raised when there is a failure to tag the release.""" class GitPushError(SemanticReleaseBaseError): """Raised when there is a failure to push to the git remote.""" python-semantic-release-10.4.1/src/semantic_release/gitproject.py000066400000000000000000000223171506116242600251630ustar00rootroot00000000000000"""Module for git related operations.""" from __future__ import annotations from contextlib import nullcontext from datetime import datetime from pathlib import Path from typing import TYPE_CHECKING from git import GitCommandError, Repo from semantic_release.cli.masking_filter import MaskingFilter from semantic_release.cli.util import indented, noop_report from semantic_release.errors import ( GitAddError, GitCommitEmptyIndexError, GitCommitError, GitPushError, GitTagError, ) from semantic_release.globals import logger if TYPE_CHECKING: # pragma: no cover from contextlib import _GeneratorContextManager from logging import Logger from typing import Sequence from git import Actor class GitProject: def __init__( self, directory: Path | str = ".", commit_author: Actor | None = None, credential_masker: MaskingFilter | None = None, ) -> None: self._project_root = Path(directory).resolve() self._logger = logger self._cred_masker = credential_masker or MaskingFilter() self._commit_author = commit_author @property def project_root(self) -> Path: return self._project_root @property def logger(self) -> Logger: return self._logger def _get_custom_environment( 
self, repo: Repo, custom_vars: dict[str, str] | None = None, ) -> nullcontext[None] | _GeneratorContextManager[None]: """ git.custom_environment is a context manager but is not reentrant, so once we have "used" it we need to throw it away and re-create it in order to use it again """ author_vars = ( { "GIT_AUTHOR_NAME": self._commit_author.name, "GIT_AUTHOR_EMAIL": self._commit_author.email, "GIT_COMMITTER_NAME": self._commit_author.name, "GIT_COMMITTER_EMAIL": self._commit_author.email, } if self._commit_author else {} ) custom_env_vars = { **author_vars, **(custom_vars or {}), } return ( nullcontext() if not custom_env_vars else repo.git.custom_environment(**custom_env_vars) ) def is_dirty(self) -> bool: with Repo(str(self.project_root)) as repo: return repo.is_dirty() def git_add( self, paths: Sequence[Path | str], force: bool = False, strict: bool = False, noop: bool = False, ) -> None: if noop: noop_report( indented( f"""\ would have run: git add {str.join(" ", [str(Path(p)) for p in paths])} """ ) ) return git_args = dict( filter( lambda k_v: k_v[1], # if truthy { "force": force, }.items(), ) ) with Repo(str(self.project_root)) as repo: # TODO: in future this loop should be 1 line: # repo.index.add(all_paths_to_add, force=False) # noqa: ERA001 # but since 'force' is deliberately ineffective (as in docstring) in gitpython 3.1.18 # we have to do manually add each filepath, and catch the exception if it is an ignored file for updated_path in paths: try: repo.git.add(str(Path(updated_path)), **git_args) except GitCommandError as err: # noqa: PERF203, acceptable performance loss err_msg = f"Failed to add path ({updated_path}) to index" if strict: self.logger.exception(str(err)) raise GitAddError(err_msg) from err self.logger.warning(err_msg) def git_commit( self, message: str, date: int | None = None, commit_all: bool = False, no_verify: bool = False, noop: bool = False, ) -> None: git_args = dict( filter( lambda k_v: k_v[1], # if truthy { "a": commit_all, "m": 
message, "date": date, "no_verify": no_verify, }.items(), ) ) if noop: command = ( f"""\ GIT_AUTHOR_NAME={self._commit_author.name} \\ GIT_AUTHOR_EMAIL={self._commit_author.email} \\ GIT_COMMITTER_NAME={self._commit_author.name} \\ GIT_COMMITTER_EMAIL={self._commit_author.email} \\ """ if self._commit_author else "" ) # Indents the newlines so that terminal formatting is happy - note the # git commit line of the output is 24 spaces indented too # Only this message needs such special handling because of the newlines # that might be in a commit message between the subject and body indented_commit_message = message.replace("\n\n", "\n\n" + " " * 24) command += f"git commit -m '{indented_commit_message}'" command += "--all" if commit_all else "" command += "--no-verify" if no_verify else "" noop_report( indented( f"""\ would have run: {command} """ ) ) return with Repo(str(self.project_root)) as repo: has_index_changes = bool(repo.index.diff("HEAD")) has_working_changes = self.is_dirty() will_commit_files = has_index_changes or ( has_working_changes and commit_all ) if not will_commit_files: raise GitCommitEmptyIndexError("No changes to commit!") with self._get_custom_environment(repo): try: repo.git.commit(**git_args) except GitCommandError as err: self.logger.exception(str(err)) raise GitCommitError("Failed to commit changes") from err def git_tag( self, tag_name: str, message: str, isotimestamp: str, noop: bool = False ) -> None: try: datetime.fromisoformat(isotimestamp) except ValueError as err: raise ValueError("Invalid timestamp format") from err if noop: command = str.join( " ", [ f"GIT_COMMITTER_DATE={isotimestamp}", *( [ f"GIT_AUTHOR_NAME={self._commit_author.name}", f"GIT_AUTHOR_EMAIL={self._commit_author.email}", f"GIT_COMMITTER_NAME={self._commit_author.name}", f"GIT_COMMITTER_EMAIL={self._commit_author.email}", ] if self._commit_author else [""] ), f"git tag -a {tag_name} -m '{message}'", ], ) noop_report( indented( f"""\ would have run: {command} """ ) ) 
return with Repo(str(self.project_root)) as repo, self._get_custom_environment( repo, {"GIT_COMMITTER_DATE": isotimestamp}, ): try: repo.git.tag("-a", tag_name, m=message) except GitCommandError as err: self.logger.exception(str(err)) raise GitTagError(f"Failed to create tag ({tag_name})") from err def git_push_branch(self, remote_url: str, branch: str, noop: bool = False) -> None: if noop: noop_report( indented( f"""\ would have run: git push {self._cred_masker.mask(remote_url)} {branch} """ ) ) return with Repo(str(self.project_root)) as repo: try: repo.git.push(remote_url, branch) except GitCommandError as err: self.logger.exception(str(err)) raise GitPushError( f"Failed to push branch ({branch}) to remote" ) from err def git_push_tag(self, remote_url: str, tag: str, noop: bool = False) -> None: if noop: noop_report( indented( f"""\ would have run: git push {self._cred_masker.mask(remote_url)} tag {tag} """ # noqa: E501 ) ) return with Repo(str(self.project_root)) as repo: try: repo.git.push(remote_url, "tag", tag) except GitCommandError as err: self.logger.exception(str(err)) raise GitPushError(f"Failed to push tag ({tag}) to remote") from err python-semantic-release-10.4.1/src/semantic_release/globals.py000066400000000000000000000007201506116242600244260ustar00rootroot00000000000000"""Semantic Release Global Variables.""" from __future__ import annotations from logging import getLogger from typing import TYPE_CHECKING from semantic_release.enums import SemanticReleaseLogLevels if TYPE_CHECKING: from logging import Logger # GLOBAL VARIABLES log_level: SemanticReleaseLogLevels = SemanticReleaseLogLevels.WARNING """int: Logging level for semantic-release""" logger: Logger = getLogger(__package__) """Logger for semantic-release""" python-semantic-release-10.4.1/src/semantic_release/helpers.py000066400000000000000000000234341506116242600244540ustar00rootroot00000000000000from __future__ import annotations import importlib.util import os import re import string 
import sys from functools import lru_cache, reduce, wraps from pathlib import Path, PurePosixPath from re import IGNORECASE, compile as regexp from typing import TYPE_CHECKING, Any, Callable, NamedTuple, Sequence, TypeVar from urllib.parse import urlsplit from semantic_release.globals import logger if TYPE_CHECKING: # pragma: no cover from logging import Logger from re import Pattern from typing import Iterable number_pattern = regexp(r"(?P\S*?)(?P\d[\d,]*)\b") hex_number_pattern = regexp( r"(?P\S*?)(?:0x)?(?P[0-9a-f]+)\b", IGNORECASE ) def get_number_from_str( string: str, default: int = -1, interpret_hex: bool = False ) -> int: if interpret_hex and (match := hex_number_pattern.search(string)): return abs(int(match.group("number"), 16)) if match := number_pattern.search(string): return int(match.group("number")) return default def sort_numerically( iterable: Iterable[str], reverse: bool = False, allow_hex: bool = False ) -> list[str]: # Alphabetically sort prefixes first, then sort by number alphabetized_list = sorted(iterable) # Extract prefixes in order to group items by prefix unmatched_items = [] prefixes: dict[str, list[str]] = {} for item in alphabetized_list: if not ( pattern_match := ( (hex_number_pattern.search(item) if allow_hex else None) or number_pattern.search(item) ) ): unmatched_items.append(item) continue prefix = prefix if (prefix := pattern_match.group("prefix")) else "" if prefix not in prefixes: prefixes[prefix] = [] prefixes[prefix].append(item) # Sort prefixes and items by number mixing in unmatched items as alphabetized with other prefixes return reduce( lambda acc, next_item: acc + next_item, [ ( sorted( prefixes[prefix], key=lambda x: get_number_from_str( x, default=-1, interpret_hex=allow_hex ), reverse=reverse, ) if prefix in prefixes else [prefix] ) for prefix in sorted([*prefixes.keys(), *unmatched_items]) ], [], ) def text_reducer(text: str, filter_pair: tuple[Pattern[str], str]) -> str: """Reduce function to apply mulitple filters 
to a string""" if not text: # abort if the paragraph is empty return text filter_pattern, replacement = filter_pair return filter_pattern.sub(replacement, text) def validate_types_in_sequence( sequence: Sequence, types: type | tuple[type, ...] ) -> bool: """Validate that all elements in a sequence are of a specific type""" return all(isinstance(item, types) for item in sequence) def format_arg(value: Any) -> str: """Helper to format an argument an argument for logging""" if type(value) == str: return f"'{value.strip()}'" return str(value) def check_tag_format(tag_format: str) -> None: if "version" not in (f[1] for f in string.Formatter().parse(tag_format)): raise ValueError( f"Invalid tag_format {tag_format!r}, must use 'version' as a format key" ) _R = TypeVar("_R") _FuncType = Callable[..., _R] def logged_function(logger: Logger) -> Callable[[_FuncType[_R]], _FuncType[_R]]: """ Decorator which adds debug logging of a function's input arguments and return value. The input arguments are logged before the function is called, and the return value is logged once it has completed. :param logger: Logger to send output to. 
""" def _logged_function(func: _FuncType[_R]) -> _FuncType[_R]: @wraps(func) def _wrapper(*args: Any, **kwargs: Any) -> _R: logger.debug( "%s(%s, %s)", func.__name__, ", ".join([format_arg(x) for x in args]), ", ".join([f"{k}={format_arg(v)}" for k, v in kwargs.items()]), ) # Call function result = func(*args, **kwargs) # Log result logger.debug("%s -> %s", func.__qualname__, str(result)) return result return _wrapper return _logged_function @logged_function(logger) def dynamic_import(import_path: str) -> Any: """ Dynamically import an object from a conventionally formatted "module:attribute" string """ if ":" not in import_path: raise ValueError( f"Invalid import path {import_path!r}, must use 'module:Class' format" ) # Split the import path into module and attribute module_name, attr = import_path.split(":", maxsplit=1) # Check if the module is a file path, if it can be resolved and exists on disk then import as a file module_filepath = Path(module_name).resolve() if module_filepath.exists(): module_path = ( module_filepath.stem if Path(module_name).is_absolute() else str(Path(module_name).with_suffix("")).replace(os.sep, ".").lstrip(".") ) if module_path not in sys.modules: logger.debug("Loading '%s' from file '%s'", module_path, module_filepath) spec = importlib.util.spec_from_file_location( module_path, str(module_filepath) ) if spec is None: raise ImportError(f"Could not import {module_filepath}") module = importlib.util.module_from_spec(spec) # type: ignore[arg-type] sys.modules.update({spec.name: module}) spec.loader.exec_module(module) # type: ignore[union-attr] return getattr(sys.modules[module_path], attr) # Otherwise, import as a module try: logger.debug("Importing module '%s'", module_name) module = importlib.import_module(module_name) logger.debug("Loading '%s' from module '%s'", attr, module_name) return getattr(module, attr) except TypeError as err: raise ImportError( str.join( "\n", [ str(err.args[0]), "Verify the import format matches 
'module:attribute' or 'path/to/module:attribute'", ], ) ) from err class ParsedGitUrl(NamedTuple): """Container for the elements parsed from a git URL""" scheme: str netloc: str namespace: str repo_name: str @lru_cache(maxsize=512) def parse_git_url(url: str) -> ParsedGitUrl: """ Attempt to parse a string as a git url http[s]://, git://, file://, or ssh format, into a ParsedGitUrl. supported examples: http://git.mycompany.com/username/myproject.git https://github.com/username/myproject.git https://gitlab.com/group/subgroup/myproject.git https://git.mycompany.com:4443/username/myproject.git git://host.xz/path/to/repo.git/ git://host.xz:9418/path/to/repo.git/ git@github.com:username/myproject.git <-- assumes ssh:// ssh://git@github.com:3759/myproject.git <-- non-standard, but assume user 3759 ssh://git@github.com:username/myproject.git ssh://git@bitbucket.org:7999/username/myproject.git git+ssh://git@github.com:username/myproject.git /Users/username/dev/remote/myproject.git <-- Posix File paths file:///Users/username/dev/remote/myproject.git C:/Users/username/dev/remote/myproject.git <-- Windows File paths file:///C:/Users/username/dev/remote/myproject.git REFERENCE: https://stackoverflow.com/questions/31801271/what-are-the-supported-git-url-formats Raises ValueError if the url can't be parsed. 
""" logger.debug("Parsing git url %r", url) # Normalizers are a list of tuples of (pattern, replacement) normalizers = [ # normalize implicit ssh urls to explicit ssh:// (r"^([\w._-]+@)", r"ssh://\1"), # normalize git+ssh:// urls to ssh:// (r"^git\+ssh://", "ssh://"), # normalize an scp like syntax to URL compatible syntax # excluding port definitions (:#####) & including numeric usernames (r"(ssh://(?:[\w._-]+@)?[\w.-]+):(?!\d{1,5}/\w+/)(.*)$", r"\1/\2"), # normalize implicit file (windows || posix) urls to explicit file:// urls (r"^([C-Z]:/)|^/(\w)", r"file:///\1\2"), ] for pattern, replacement in normalizers: url = re.compile(pattern).sub(replacement, url) # run the url through urlsplit to separate out the parts urllib_split = urlsplit(url) # Fail if url scheme not found if not urllib_split.scheme: raise ValueError(f"Cannot parse {url!r}") # We have been able to parse the url with urlsplit, # so it's a (file|git|ssh|https?)://... structure # but we aren't validating the protocol scheme as its not our business # use PosixPath to normalize the path & then separate out the namespace & repo_name namespace, _, name = ( str(PurePosixPath(urllib_split.path)).lstrip("/").rpartition("/") ) # strip out the .git at the end of the repo_name if present name = name[:-4] if name.endswith(".git") else name # check that we have all the required parts of the url required_parts = [ urllib_split.scheme, # Allow empty net location for file:// urls True if urllib_split.scheme == "file" else urllib_split.netloc, namespace, name, ] if not all(required_parts): raise ValueError(f"Bad url: {url!r}") return ParsedGitUrl( scheme=urllib_split.scheme, netloc=urllib_split.netloc, namespace=namespace, repo_name=name, ) 
python-semantic-release-10.4.1/src/semantic_release/hvcs/000077500000000000000000000000001506116242600233755ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/hvcs/__init__.py000066400000000000000000000007561506116242600255160ustar00rootroot00000000000000from semantic_release.hvcs._base import HvcsBase from semantic_release.hvcs.bitbucket import Bitbucket from semantic_release.hvcs.gitea import Gitea from semantic_release.hvcs.github import Github from semantic_release.hvcs.gitlab import Gitlab from semantic_release.hvcs.remote_hvcs_base import RemoteHvcsBase from semantic_release.hvcs.token_auth import TokenAuth __all__ = [ "Bitbucket", "Gitea", "Github", "Gitlab", "HvcsBase", "RemoteHvcsBase", "TokenAuth", ] python-semantic-release-10.4.1/src/semantic_release/hvcs/_base.py000066400000000000000000000050501506116242600250200ustar00rootroot00000000000000"""Common functionality and interface for interacting with Git remote VCS""" from __future__ import annotations import warnings from abc import ABCMeta, abstractmethod from functools import lru_cache from typing import TYPE_CHECKING from semantic_release.helpers import parse_git_url if TYPE_CHECKING: # pragma: no cover from typing import Any, Callable class HvcsBase(metaclass=ABCMeta): """ Interface for subclasses interacting with a remote vcs environment Methods generally have a base implementation are implemented here but likely just provide a not-supported message but return gracefully This class cannot be instantated directly but must be inherited from and implement the designated abstract methods. 
""" def __init__(self, remote_url: str, *args: Any, **kwargs: Any) -> None: self._remote_url = remote_url if parse_git_url(remote_url) else "" self._name: str | None = None self._owner: str | None = None def _not_supported(self: HvcsBase, method_name: str) -> None: warnings.warn( f"{method_name} is not supported by {type(self).__qualname__}", stacklevel=2, ) @lru_cache(maxsize=1) def _get_repository_owner_and_name(self) -> tuple[str, str]: """ Parse the repository's remote url to identify the repository owner and name """ parsed_git_url = parse_git_url(self._remote_url) return parsed_git_url.namespace, parsed_git_url.repo_name @property def repo_name(self) -> str: if self._name is None: _, name = self._get_repository_owner_and_name() self._name = name return self._name @property def owner(self) -> str: if self._owner is None: _owner, _ = self._get_repository_owner_and_name() self._owner = _owner return self._owner @abstractmethod def remote_url(self, use_token: bool) -> str: """ Return the remote URL for the repository, including the token for authentication if requested by setting the `use_token` parameter to True, """ self._not_supported(self.remote_url.__name__) return "" @abstractmethod def get_changelog_context_filters(self) -> tuple[Callable[..., Any], ...]: """ Return a list of functions that can be used as filters in a Jinja2 template ex. filters to convert text to URLs for issues and commits """ self._not_supported(self.get_changelog_context_filters.__name__) return () python-semantic-release-10.4.1/src/semantic_release/hvcs/bitbucket.py000066400000000000000000000236441506116242600257340ustar00rootroot00000000000000"""Helper code for interacting with a Bitbucket remote VCS""" # Note: Bitbucket doesn't support releases. But it allows users to use # `semantic-release version` without having to specify `--no-vcs-release`. 
from __future__ import annotations import os from functools import lru_cache from pathlib import PurePosixPath from re import compile as regexp from typing import TYPE_CHECKING from urllib3.util.url import Url, parse_url from semantic_release.globals import logger from semantic_release.hvcs.remote_hvcs_base import RemoteHvcsBase if TYPE_CHECKING: # pragma: no cover from typing import Any, Callable class Bitbucket(RemoteHvcsBase): """ Bitbucket HVCS interface for interacting with BitBucket repositories This class supports the following products: - BitBucket Cloud - BitBucket Data Center Server (on-premises installations) This interface does its best to detect which product is configured based on the provided domain. If it is the official `bitbucket.org`, the default domain, then it is considered as BitBucket Cloud which uses the subdomain `api.bitbucket.org/2.0` for api communication. If the provided domain is anything else, than it is assumed to be communicating with an on-premise or 3rd-party maintained BitBucket instance which matches with the BitBucket Data Center Server product. The on-prem server product uses a path prefix for handling api requests which is configured to be `server.domain/rest/api/1.0` based on the documentation in April 2024. """ OFFICIAL_NAME = "Bitbucket" DEFAULT_DOMAIN = "bitbucket.org" DEFAULT_API_SUBDOMAIN_PREFIX = "api" DEFAULT_API_PATH_CLOUD = "/2.0" DEFAULT_API_PATH_ONPREM = "/rest/api/1.0" DEFAULT_API_URL_CLOUD = f"https://{DEFAULT_API_SUBDOMAIN_PREFIX}.{DEFAULT_DOMAIN}{DEFAULT_API_PATH_CLOUD}" DEFAULT_ENV_TOKEN_NAME = "BITBUCKET_TOKEN" # noqa: S105 def __init__( self, remote_url: str, *, hvcs_domain: str | None = None, hvcs_api_domain: str | None = None, token: str | None = None, allow_insecure: bool = False, **kwargs: Any, # noqa: ARG002 ) -> None: super().__init__(remote_url) self.token = token # NOTE: Uncomment in the future when we actually have functionalty to # use the api, but currently there is none. 
# auth = None if not self.token else TokenAuth(self.token) # self.session = build_requests_session(auth=auth) domain_url = self._normalize_url( hvcs_domain or f"https://{self.DEFAULT_DOMAIN}", allow_insecure=allow_insecure, ) # Strip any auth, query or fragment from the domain self._hvcs_domain = parse_url( Url( scheme=domain_url.scheme, host=domain_url.host, port=domain_url.port, path=str(PurePosixPath(domain_url.path or "/")), ).url.rstrip("/") ) # Parse api domain if provided otherwise infer from domain api_domain_parts = self._normalize_url( hvcs_api_domain or self._derive_api_url_from_base_domain(), allow_insecure=allow_insecure, ) # As Bitbucket Cloud and Bitbucket Server (on-prem) have different api paths # lets check what we have been given and set the api url accordingly # ref: https://developer.atlassian.com/server/bitbucket/how-tos/command-line-rest/ # NOTE: BitBucket Server (on premise) uses a path prefix '/rest/api/1.0' for the api # while BitBucket Cloud uses a separate subdomain with '/2.0' path prefix is_bitbucket_cloud = bool( self.hvcs_domain.url == f"https://{self.DEFAULT_DOMAIN}" ) if ( is_bitbucket_cloud and hvcs_api_domain and api_domain_parts.url not in Bitbucket.DEFAULT_API_URL_CLOUD ): # Api was provied but is not a subset of the expected one, raise an error # we check for a subset because the user may not have provided the full api path # but the correct domain. If they didn't, then we are erroring out here. raise ValueError( f"Invalid api domain {api_domain_parts.url} for BitBucket Cloud. " f"Expected {Bitbucket.DEFAULT_API_URL_CLOUD}." 
) # Set the api url to the default cloud one if we are on cloud, otherwise # use the verified api domain for a on-prem server self._api_url = parse_url( Bitbucket.DEFAULT_API_URL_CLOUD if is_bitbucket_cloud else Url( # Strip any auth, query or fragment from the domain scheme=api_domain_parts.scheme, host=api_domain_parts.host, port=api_domain_parts.port, path=str( PurePosixPath( # pass any custom server prefix path but ensure we don't # double up the api path in the case the user provided it str.replace( api_domain_parts.path or "", self.DEFAULT_API_PATH_ONPREM, "", ).lstrip("/") or "/", # apply the on-prem api path self.DEFAULT_API_PATH_ONPREM.lstrip("/"), ) ), ).url.rstrip("/") ) def _derive_api_url_from_base_domain(self) -> Url: return parse_url( Url( # infer from Domain url and append the api path **{ **self.hvcs_domain._asdict(), "host": self.hvcs_domain.host, "path": str( PurePosixPath( str.lstrip(self.hvcs_domain.path or "", "/") or "/", self.DEFAULT_API_PATH_ONPREM.lstrip("/"), ) ), } ).url.rstrip("/") ) @lru_cache(maxsize=1) def _get_repository_owner_and_name(self) -> tuple[str, str]: # ref: https://support.atlassian.com/bitbucket-cloud/docs/variables-and-secrets/ if "BITBUCKET_REPO_FULL_NAME" in os.environ: logger.info("Getting repository owner and name from environment variables.") owner, name = os.environ["BITBUCKET_REPO_FULL_NAME"].rsplit("/", 1) return owner, name return super()._get_repository_owner_and_name() def remote_url(self, use_token: bool = True) -> str: """Get the remote url including the token for authentication if requested""" if not use_token: return self._remote_url if not self.token: raise ValueError("Requested to use token but no token set.") # If the user is set, assume the token is an user secret. This will work # on any repository the user has access to. 
# https://support.atlassian.com/bitbucket-cloud/docs/push-back-to-your-repository # If the user variable is not set, assume it is a repository token # which will only work on the repository it was created for. # https://support.atlassian.com/bitbucket-cloud/docs/using-access-tokens user = os.environ.get("BITBUCKET_USER", "x-token-auth") return self.create_server_url( auth=f"{user}:{self.token}" if user else self.token, path=f"/{self.owner}/{self.repo_name}.git", ) def compare_url(self, from_rev: str, to_rev: str) -> str: """ Get the Bitbucket comparison link between two version tags. :param from_rev: The older version to compare. :param to_rev: The newer version to compare. :return: Link to view a comparison between the two versions. """ return self.create_repo_url( repo_path=f"/branches/compare/{from_rev}%0D{to_rev}" ) def commit_hash_url(self, commit_hash: str) -> str: return self.create_repo_url(repo_path=f"/commits/{commit_hash}") def pull_request_url(self, pr_number: str | int) -> str: # Strips off any character prefix like '#' that usually exists if isinstance(pr_number, str) and ( match := regexp(r"(\d+)$").search(pr_number) ): try: pr_number = int(match.group(1)) except ValueError: return "" if isinstance(pr_number, int): return self.create_repo_url(repo_path=f"/pull-requests/{pr_number}") return "" @staticmethod def format_w_official_vcs_name(format_str: str) -> str: if "%s" in format_str: return format_str % Bitbucket.OFFICIAL_NAME if "{}" in format_str: return format_str.format(Bitbucket.OFFICIAL_NAME) if "{vcs_name}" in format_str: return format_str.format(vcs_name=Bitbucket.OFFICIAL_NAME) return format_str def get_changelog_context_filters(self) -> tuple[Callable[..., Any], ...]: return ( self.create_server_url, self.create_repo_url, self.commit_hash_url, self.compare_url, self.pull_request_url, self.format_w_official_vcs_name, ) def upload_dists(self, tag: str, dist_glob: str) -> int: return super().upload_dists(tag, dist_glob) def 
create_or_update_release( self, tag: str, release_notes: str, prerelease: bool = False ) -> int | str: return super().create_or_update_release(tag, release_notes, prerelease) def create_release( self, tag: str, release_notes: str, prerelease: bool = False, assets: list[str] | None = None, noop: bool = False, ) -> int | str: return super().create_release(tag, release_notes, prerelease, assets, noop) RemoteHvcsBase.register(Bitbucket) python-semantic-release-10.4.1/src/semantic_release/hvcs/gitea.py000066400000000000000000000322271506116242600250460ustar00rootroot00000000000000"""Helper code for interacting with a Gitea remote VCS""" from __future__ import annotations import glob import os from pathlib import PurePosixPath from re import compile as regexp from typing import TYPE_CHECKING from requests import HTTPError, JSONDecodeError from urllib3.util.url import Url, parse_url from semantic_release.cli.util import noop_report from semantic_release.errors import ( AssetUploadError, IncompleteReleaseError, UnexpectedResponse, ) from semantic_release.globals import logger from semantic_release.helpers import logged_function from semantic_release.hvcs.remote_hvcs_base import RemoteHvcsBase from semantic_release.hvcs.token_auth import TokenAuth from semantic_release.hvcs.util import build_requests_session, suppress_not_found if TYPE_CHECKING: # pragma: no cover from typing import Any, Callable class Gitea(RemoteHvcsBase): """Gitea helper class""" OFFICIAL_NAME = "Gitea" DEFAULT_DOMAIN = "gitea.com" DEFAULT_API_PATH = "/api/v1" DEFAULT_ENV_TOKEN_NAME = "GITEA_TOKEN" # noqa: S105 def __init__( self, remote_url: str, *, hvcs_domain: str | None = None, token: str | None = None, allow_insecure: bool = False, **_kwargs: Any, ) -> None: super().__init__(remote_url) self.token = token auth = None if not self.token else TokenAuth(self.token) self.session = build_requests_session(auth=auth) domain_url = self._normalize_url( hvcs_domain or os.getenv("GITEA_SERVER_URL", "") or 
f"https://{self.DEFAULT_DOMAIN}", allow_insecure=allow_insecure, ) # Strip any auth, query or fragment from the domain self._hvcs_domain = parse_url( Url( scheme=domain_url.scheme, host=domain_url.host, port=domain_url.port, path=str(PurePosixPath(domain_url.path or "/")), ).url.rstrip("/") ) self._api_url = self._normalize_url( os.getenv("GITEA_API_URL", "").rstrip("/") or Url( # infer from Domain url and append the default api path **{ **self.hvcs_domain._asdict(), "path": f"{self.hvcs_domain.path or ''}{self.DEFAULT_API_PATH}", } ).url, allow_insecure=allow_insecure, ) @logged_function(logger) def create_release( self, tag: str, release_notes: str, prerelease: bool = False, assets: list[str] | None = None, noop: bool = False, ) -> int: """ Create a new release Ref: https://gitea.com/api/swagger#/repository/repoCreateRelease :param tag: Tag to create release for :param release_notes: The release notes for this version :param prerelease: Whether or not this release should be specified as a prerelease :return: Whether the request succeeded """ if noop: noop_report( str.join( " ", [ f"would have created a release for tag {tag}", "with the following notes:\n", release_notes, ], ) ) if assets: noop_report( str.join( "\n", [ "would have uploaded the following assets to the release:", *assets, ], ) ) return -1 logger.info("Creating release for tag %s", tag) releases_endpoint = self.create_api_url( endpoint=f"/repos/{self.owner}/{self.repo_name}/releases", ) response = self.session.post( releases_endpoint, json={ "tag_name": tag, "name": tag, "body": release_notes, "draft": False, "prerelease": prerelease, }, ) # Raise an error if the request was not successful response.raise_for_status() try: release_id: int = response.json()["id"] logger.info("Successfully created release with ID: %s", release_id) except JSONDecodeError as err: raise UnexpectedResponse("Unreadable json response") from err except KeyError as err: raise UnexpectedResponse("JSON response is missing an 
id") from err errors = [] for asset in assets or []: logger.info("Uploading asset %s", asset) try: self.upload_release_asset(release_id, asset) except HTTPError as err: errors.append( AssetUploadError(f"Failed asset upload for {asset}").with_traceback( err.__traceback__ ) ) if len(errors) < 1: return release_id for error in errors: logger.exception(error) raise IncompleteReleaseError( f"Failed to upload asset{'s' if len(errors) > 1 else ''} to release!" ) @logged_function(logger) @suppress_not_found def get_release_id_by_tag(self, tag: str) -> int | None: """ Get a release by its tag name https://gitea.com/api/swagger#/repository/repoGetReleaseByTag :param tag: Tag to get release for :return: ID of found release """ tag_endpoint = self.create_api_url( endpoint=f"/repos/{self.owner}/{self.repo_name}/releases/tags/{tag}", ) response = self.session.get(tag_endpoint) # Raise an error if the request was not successful response.raise_for_status() try: data = response.json() return data["id"] except JSONDecodeError as err: raise UnexpectedResponse("Unreadable json response") from err except KeyError as err: raise UnexpectedResponse("JSON response is missing an id") from err @logged_function(logger) def edit_release_notes(self, release_id: int, release_notes: str) -> int: """ Edit a release with updated change notes https://gitea.com/api/swagger#/repository/repoEditRelease :param id: ID of release to update :param release_notes: The release notes for this version :return: The ID of the release that was edited """ logger.info("Updating release %s", release_id) release_endpoint = self.create_api_url( endpoint=f"/repos/{self.owner}/{self.repo_name}/releases/{release_id}", ) response = self.session.patch( release_endpoint, json={"body": release_notes}, ) # Raise an error if the request was not successful response.raise_for_status() return release_id @logged_function(logger) def create_or_update_release( self, tag: str, release_notes: str, prerelease: bool = False ) -> int: """ 
Post release changelog :param version: The version number :param changelog: The release notes for this version :return: The status of the request """ logger.info("Creating release for %s", tag) try: return self.create_release(tag, release_notes, prerelease) except HTTPError as err: logger.debug("error creating release: %s", err) logger.debug("looking for an existing release to update") release_id = self.get_release_id_by_tag(tag) if release_id is None: raise ValueError( f"release id for tag {tag} not found, and could not be created" ) # If this errors we let it die logger.debug("Found existing release %s, updating", release_id) return self.edit_release_notes(release_id, release_notes) @logged_function(logger) def asset_upload_url(self, release_id: str) -> str: """ Get the correct upload url for a release https://gitea.com/api/swagger#/repository/repoCreateReleaseAttachment :param release_id: ID of the release to upload to """ return self.create_api_url( endpoint=f"/repos/{self.owner}/{self.repo_name}/releases/{release_id}/assets", ) @logged_function(logger) def upload_release_asset( self, release_id: int, file: str, label: str | None = None, # noqa: ARG002 ) -> bool: """ Upload an asset to an existing release https://gitea.com/api/swagger#/repository/repoCreateReleaseAttachment :param release_id: ID of the release to upload to :param file: Path of the file to upload :param label: this parameter has no effect :return: The status of the request """ url = self.asset_upload_url(release_id) with open(file, "rb") as attachment: name = os.path.basename(file) content_type = "application/octet-stream" response = self.session.post( url, params={"name": name}, data={}, files={ "attachment": ( name, attachment, content_type, ), }, ) # Raise an error if the request was not successful response.raise_for_status() logger.info( "Successfully uploaded %s to Gitea, url: %s, status code: %s", file, response.url, response.status_code, ) return True @logged_function(logger) def 
upload_dists(self, tag: str, dist_glob: str) -> int: """ Upload distributions to a release :param tag: Tag to upload for :param path: Path to the dist directory :return: The number of distributions successfully uploaded """ # Find the release corresponding to this tag release_id = self.get_release_id_by_tag(tag=tag) if not release_id: logger.warning("No release corresponds to tag %s, can't upload dists", tag) return 0 # Upload assets n_succeeded = 0 for file_path in ( f for f in glob.glob(dist_glob, recursive=True) if os.path.isfile(f) ): try: self.upload_release_asset(release_id, file_path) n_succeeded += 1 except HTTPError: # noqa: PERF203 logger.exception("error uploading asset %s", file_path) return n_succeeded def remote_url(self, use_token: bool = True) -> str: """Get the remote url including the token for authentication if requested""" if not (self.token and use_token): return self._remote_url return self.create_server_url( auth=self.token, path=f"{self.owner}/{self.repo_name}.git", ) def commit_hash_url(self, commit_hash: str) -> str: return self.create_repo_url(repo_path=f"/commit/{commit_hash}") def issue_url(self, issue_num: str | int) -> str: # Strips off any character prefix like '#' that usually exists if isinstance(issue_num, str) and ( match := regexp(r"(\d+)$").search(issue_num) ): try: issue_num = int(match.group(1)) except ValueError: return "" if isinstance(issue_num, int): return self.create_repo_url(repo_path=f"/issues/{issue_num}") return "" def pull_request_url(self, pr_number: str | int) -> str: # Strips off any character prefix like '#' that usually exists if isinstance(pr_number, str) and ( match := regexp(r"(\d+)$").search(pr_number) ): try: pr_number = int(match.group(1)) except ValueError: return "" if isinstance(pr_number, int): return self.create_repo_url(repo_path=f"/pulls/{pr_number}") return "" def create_release_url(self, tag: str = "") -> str: tag_str = tag.strip() tag_path = f"tag/{tag_str}" if tag_str else "" return 
self.create_repo_url(repo_path=f"releases/{tag_path}") @staticmethod def format_w_official_vcs_name(format_str: str) -> str: if "%s" in format_str: return format_str % Gitea.OFFICIAL_NAME if "{}" in format_str: return format_str.format(Gitea.OFFICIAL_NAME) if "{vcs_name}" in format_str: return format_str.format(vcs_name=Gitea.OFFICIAL_NAME) return format_str def get_changelog_context_filters(self) -> tuple[Callable[..., Any], ...]: return ( self.create_server_url, self.create_repo_url, self.commit_hash_url, self.issue_url, self.pull_request_url, self.create_release_url, self.format_w_official_vcs_name, ) RemoteHvcsBase.register(Gitea) python-semantic-release-10.4.1/src/semantic_release/hvcs/github.py000066400000000000000000000503271506116242600252400ustar00rootroot00000000000000"""Helper code for interacting with a GitHub remote VCS""" from __future__ import annotations import glob import mimetypes import os from functools import lru_cache from pathlib import PurePosixPath from re import compile as regexp from typing import TYPE_CHECKING from requests import HTTPError, JSONDecodeError from urllib3.util.url import Url, parse_url from semantic_release.cli.util import noop_report from semantic_release.errors import ( AssetUploadError, IncompleteReleaseError, UnexpectedResponse, ) from semantic_release.globals import logger from semantic_release.helpers import logged_function from semantic_release.hvcs.remote_hvcs_base import RemoteHvcsBase from semantic_release.hvcs.token_auth import TokenAuth from semantic_release.hvcs.util import build_requests_session, suppress_not_found if TYPE_CHECKING: # pragma: no cover from typing import Any, Callable # Add a mime type for wheels # Fix incorrect entries in the `mimetypes` registry. # On Windows, the Python standard library's `mimetypes` reads in # mappings from file extension to MIME type from the Windows # registry. 
Other applications can and do write incorrect values # to this registry, which causes `mimetypes.guess_type` to return # incorrect values, which causes TensorBoard to fail to render on # the frontend. # This method hard-codes the correct mappings for certain MIME # types that are known to be either used by python-semantic-release or # problematic in general. if mimetypes.guess_type("test.whl")[0] != "application/octet-stream": mimetypes.add_type("application/octet-stream", ".whl") if mimetypes.guess_type("test.md")[0] != "text/markdown": mimetypes.add_type("text/markdown", ".md") class Github(RemoteHvcsBase): """ GitHub HVCS interface for interacting with GitHub repositories This class supports the following products: - GitHub Free, Pro, & Team - GitHub Enterprise Cloud - GitHub Enterprise Server (on-premises installations) This interface does its best to detect which product is configured based on the provided domain. If it is the official `github.com`, the default domain, then it is considered as GitHub Enterprise Cloud which uses the subdomain `api.github.com` for api communication. If the provided domain is anything else, than it is assumed to be communicating with an on-premise or 3rd-party maintained GitHub instance which matches with the GitHub Enterprise Server product. The on-prem server product uses a path prefix for handling api requests which is configured to be `server.domain/api/v3` based on the documentation in April 2024. """ OFFICIAL_NAME = "GitHub" DEFAULT_DOMAIN = "github.com" DEFAULT_API_SUBDOMAIN_PREFIX = "api" DEFAULT_API_DOMAIN = f"{DEFAULT_API_SUBDOMAIN_PREFIX}.{DEFAULT_DOMAIN}" DEFAULT_API_PATH_CLOUD = "/" # no path prefix! 
DEFAULT_API_PATH_ONPREM = "/api/v3" DEFAULT_API_URL_CLOUD = f"https://{DEFAULT_API_SUBDOMAIN_PREFIX}.{DEFAULT_DOMAIN}{DEFAULT_API_PATH_CLOUD}".rstrip( "/" ) DEFAULT_ENV_TOKEN_NAME = "GH_TOKEN" # noqa: S105 def __init__( self, remote_url: str, *, hvcs_domain: str | None = None, hvcs_api_domain: str | None = None, token: str | None = None, allow_insecure: bool = False, **_kwargs: Any, ) -> None: super().__init__(remote_url) self.token = token auth = None if not self.token else TokenAuth(self.token) self.session = build_requests_session(auth=auth) # ref: https://docs.github.com/en/actions/reference/environment-variables#default-environment-variables domain_url_str = ( hvcs_domain or os.getenv("GITHUB_SERVER_URL", "") or f"https://{self.DEFAULT_DOMAIN}" ) domain_url = self._normalize_url( domain_url_str, allow_insecure=allow_insecure, ) # Strip any auth, query or fragment from the domain self._hvcs_domain = parse_url( Url( scheme=domain_url.scheme, host=domain_url.host, port=domain_url.port, path=str(PurePosixPath(domain_url.path or "/")), ).url.rstrip("/") ) # ref: https://docs.github.com/en/actions/reference/environment-variables#default-environment-variables api_url_str = ( hvcs_api_domain or os.getenv("GITHUB_API_URL", "") or self._derive_api_url_from_base_domain() ) api_domain_parts = self._normalize_url( api_url_str, allow_insecure=allow_insecure, ) # As GitHub Enterprise Cloud and GitHub Enterprise Server (on-prem) have different api locations # lets check what we have been given and set the api url accordingly # NOTE: Github Server (on premise) uses a path prefix '/api/v3' for the api # while GitHub Enterprise Cloud uses a separate subdomain as the base is_github_cloud = bool(self.hvcs_domain.url == f"https://{self.DEFAULT_DOMAIN}") if ( is_github_cloud and hvcs_api_domain and api_domain_parts.url not in Github.DEFAULT_API_URL_CLOUD ): # Api was provied but is not a subset of the expected one, raise an error # we check for a subset because the user may not have 
provided the full api path # but the correct domain. If they didn't, then we are erroring out here. raise ValueError( f"Invalid api domain {api_domain_parts.url} for GitHub Enterprise Cloud. " f"Expected {Github.DEFAULT_API_URL_CLOUD}." ) # Set the api url to the default cloud one if we are on cloud, otherwise # use the verified api domain for a on-prem server self._api_url = parse_url( Github.DEFAULT_API_URL_CLOUD if is_github_cloud else Url( # Strip any auth, query or fragment from the domain scheme=api_domain_parts.scheme, host=api_domain_parts.host, port=api_domain_parts.port, path=str( PurePosixPath( # pass any custom server prefix path but ensure we don't # double up the api path in the case the user provided it str.replace( api_domain_parts.path or "", self.DEFAULT_API_PATH_ONPREM, "", ).lstrip("/") or "/", # apply the on-prem api path self.DEFAULT_API_PATH_ONPREM.lstrip("/"), ) ), ).url.rstrip("/") ) def _derive_api_url_from_base_domain(self) -> Url: return parse_url( Url( # infer from Domain url and prepend the default api subdomain **{ **self.hvcs_domain._asdict(), "host": self.hvcs_domain.host, "path": str( PurePosixPath( str.lstrip(self.hvcs_domain.path or "", "/") or "/", self.DEFAULT_API_PATH_ONPREM.lstrip("/"), ) ), } ).url.rstrip("/") ) @lru_cache(maxsize=1) def _get_repository_owner_and_name(self) -> tuple[str, str]: # Github actions context if "GITHUB_REPOSITORY" in os.environ: logger.debug("getting repository owner and name from environment variables") owner, name = os.environ["GITHUB_REPOSITORY"].rsplit("/", 1) return owner, name return super()._get_repository_owner_and_name() @logged_function(logger) def create_release( self, tag: str, release_notes: str, prerelease: bool = False, assets: list[str] | None = None, noop: bool = False, ) -> int: """ Create a new release REF: https://docs.github.com/rest/reference/repos#create-a-release :param tag: Tag to create release for :param release_notes: The release notes for this version :param prerelease: 
Whether or not this release should be created as a prerelease :param assets: a list of artifacts to upload to the release :return: the ID of the release """ if noop: noop_report( str.join( " ", [ f"would have created a release for tag {tag}", "with the following notes:\n", release_notes, ], ) ) if assets: noop_report( str.join( "\n", [ "would have uploaded the following assets to the release:", *assets, ], ) ) return -1 logger.info("Creating release for tag %s", tag) releases_endpoint = self.create_api_url( endpoint=f"/repos/{self.owner}/{self.repo_name}/releases", ) response = self.session.post( releases_endpoint, json={ "tag_name": tag, "name": tag, "body": release_notes, "draft": False, "prerelease": prerelease, }, ) # Raise an error if the request was not successful response.raise_for_status() try: release_id: int = response.json()["id"] logger.info("Successfully created release with ID: %s", release_id) except JSONDecodeError as err: raise UnexpectedResponse("Unreadable json response") from err except KeyError as err: raise UnexpectedResponse("JSON response is missing an id") from err errors = [] for asset in assets or []: logger.info("Uploading asset %s", asset) try: self.upload_release_asset(release_id, asset) except HTTPError as err: errors.append( AssetUploadError(f"Failed asset upload for {asset}").with_traceback( err.__traceback__ ) ) if len(errors) < 1: return release_id for error in errors: logger.exception(error) raise IncompleteReleaseError( f"Failed to upload asset{'s' if len(errors) > 1 else ''} to release!" 
) @logged_function(logger) @suppress_not_found def get_release_id_by_tag(self, tag: str) -> int | None: """ Get a release by its tag name https://docs.github.com/rest/reference/repos#get-a-release-by-tag-name :param tag: Tag to get release for :return: ID of release, if found, else None """ tag_endpoint = self.create_api_url( endpoint=f"/repos/{self.owner}/{self.repo_name}/releases/tags/{tag}", ) response = self.session.get(tag_endpoint) # Raise an error if the request was not successful response.raise_for_status() try: data = response.json() return data["id"] except JSONDecodeError as err: raise UnexpectedResponse("Unreadable json response") from err except KeyError as err: raise UnexpectedResponse("JSON response is missing an id") from err @logged_function(logger) def edit_release_notes(self, release_id: int, release_notes: str) -> int: """ Edit a release with updated change notes https://docs.github.com/rest/reference/repos#update-a-release :param release_id: ID of release to update :param release_notes: The release notes for this version :return: The ID of the release that was edited """ logger.info("Updating release %s", release_id) release_endpoint = self.create_api_url( endpoint=f"/repos/{self.owner}/{self.repo_name}/releases/{release_id}", ) response = self.session.post( release_endpoint, json={"body": release_notes}, ) # Raise an error if the update was unsuccessful response.raise_for_status() return release_id @logged_function(logger) def create_or_update_release( self, tag: str, release_notes: str, prerelease: bool = False ) -> int: """ Post release changelog :param tag: The version number :param release_notes: The release notes for this version :param prerelease: Whether or not this release should be created as a prerelease :return: The status of the request """ logger.info("Creating release for %s", tag) try: return self.create_release(tag, release_notes, prerelease) except HTTPError as err: logger.debug("error creating release: %s", err) 
logger.debug("looking for an existing release to update") release_id = self.get_release_id_by_tag(tag) if release_id is None: raise ValueError( f"release id for tag {tag} not found, and could not be created" ) logger.debug("Found existing release %s, updating", release_id) # If this errors we let it die return self.edit_release_notes(release_id, release_notes) @logged_function(logger) @suppress_not_found def asset_upload_url(self, release_id: str) -> str | None: """ Get the correct upload url for a release https://docs.github.com/en/enterprise-server@3.5/rest/releases/releases#get-a-release :param release_id: ID of the release to upload to :return: URL to upload for a release if found, else None """ # https://docs.github.com/en/enterprise-server@3.5/rest/releases/assets#upload-a-release-asset release_url = self.create_api_url( endpoint=f"/repos/{self.owner}/{self.repo_name}/releases/{release_id}" ) response = self.session.get(release_url) response.raise_for_status() try: upload_url: str = response.json()["upload_url"] return upload_url.replace("{?name,label}", "") except JSONDecodeError as err: raise UnexpectedResponse("Unreadable json response") from err except KeyError as err: raise UnexpectedResponse( "JSON response is missing a key 'upload_url'" ) from err @logged_function(logger) def upload_release_asset( self, release_id: int, file: str, label: str | None = None ) -> bool: """ Upload an asset to an existing release https://docs.github.com/rest/reference/repos#upload-a-release-asset :param release_id: ID of the release to upload to :param file: Path of the file to upload :param label: Optional custom label for this file :return: The status of the request """ url = self.asset_upload_url(release_id) if url is None: raise ValueError( "There is no associated url for uploading asset for release " f"{release_id}. 
Release url: " f"{self.api_url}/repos/{self.owner}/{self.repo_name}/releases/{release_id}" ) content_type = ( mimetypes.guess_type(file, strict=False)[0] or "application/octet-stream" ) with open(file, "rb") as data: response = self.session.post( url, params={"name": os.path.basename(file), "label": label}, headers={ "Content-Type": content_type, }, data=data.read(), ) # Raise an error if the upload was unsuccessful response.raise_for_status() logger.debug( "Successfully uploaded %s to Github, url: %s, status code: %s", file, response.url, response.status_code, ) return True @logged_function(logger) def upload_dists(self, tag: str, dist_glob: str) -> int: """ Upload distributions to a release :param tag: Version to upload for :param dist_glob: Path to the dist directory :return: The number of distributions successfully uploaded """ # Find the release corresponding to this version release_id = self.get_release_id_by_tag(tag=tag) if not release_id: logger.warning("No release corresponds to tag %s, can't upload dists", tag) return 0 # Upload assets n_succeeded = 0 for file_path in ( f for f in glob.glob(dist_glob, recursive=True) if os.path.isfile(f) ): try: self.upload_release_asset(release_id, file_path) n_succeeded += 1 except HTTPError: # noqa: PERF203 logger.exception("error uploading asset %s", file_path) return n_succeeded def remote_url(self, use_token: bool = True) -> str: """Get the remote url including the token for authentication if requested""" if not (self.token and use_token): logger.info("requested to use token for push but no token set, ignoring...") return self._remote_url actor = os.getenv("GITHUB_ACTOR", None) return self.create_server_url( auth=f"{actor}:{self.token}" if actor else self.token, path=f"/{self.owner}/{self.repo_name}.git", ) def compare_url(self, from_rev: str, to_rev: str) -> str: """ Get the GitHub comparison link between two version tags. :param from_rev: The older version to compare. :param to_rev: The newer version to compare. 
:return: Link to view a comparison between the two versions. """ return self.create_repo_url(repo_path=f"/compare/{from_rev}...{to_rev}") def commit_hash_url(self, commit_hash: str) -> str: return self.create_repo_url(repo_path=f"/commit/{commit_hash}") def issue_url(self, issue_num: str | int) -> str: # Strips off any character prefix like '#' that usually exists if isinstance(issue_num, str) and ( match := regexp(r"(\d+)$").search(issue_num) ): try: issue_num = int(match.group(1)) except ValueError: return "" if isinstance(issue_num, int): return self.create_repo_url(repo_path=f"/issues/{issue_num}") return "" def pull_request_url(self, pr_number: str | int) -> str: # Strips off any character prefix like '#' that usually exists if isinstance(pr_number, str) and ( match := regexp(r"(\d+)$").search(pr_number) ): try: pr_number = int(match.group(1)) except ValueError: return "" if isinstance(pr_number, int): return self.create_repo_url(repo_path=f"/pull/{pr_number}") return "" def create_release_url(self, tag: str = "") -> str: tag_str = tag.strip() tag_path = f"tag/{tag_str}" if tag_str else "" return self.create_repo_url(repo_path=f"releases/{tag_path}") @staticmethod def format_w_official_vcs_name(format_str: str) -> str: if "%s" in format_str: return format_str % Github.OFFICIAL_NAME if "{}" in format_str: return format_str.format(Github.OFFICIAL_NAME) if "{vcs_name}" in format_str: return format_str.format(vcs_name=Github.OFFICIAL_NAME) return format_str def get_changelog_context_filters(self) -> tuple[Callable[..., Any], ...]: return ( self.create_server_url, self.create_repo_url, self.commit_hash_url, self.compare_url, self.issue_url, self.pull_request_url, self.create_release_url, self.format_w_official_vcs_name, ) RemoteHvcsBase.register(Github) python-semantic-release-10.4.1/src/semantic_release/hvcs/gitlab.py000066400000000000000000000245561506116242600252250ustar00rootroot00000000000000"""Helper code for interacting with a Gitlab remote VCS""" from 
__future__ import annotations import os from functools import lru_cache from pathlib import PurePosixPath from re import compile as regexp from typing import TYPE_CHECKING import gitlab import gitlab.exceptions import gitlab.v4 import gitlab.v4.objects from urllib3.util.url import Url, parse_url from semantic_release.cli.util import noop_report from semantic_release.errors import UnexpectedResponse from semantic_release.globals import logger from semantic_release.helpers import logged_function from semantic_release.hvcs.remote_hvcs_base import RemoteHvcsBase from semantic_release.hvcs.util import suppress_not_found if TYPE_CHECKING: # pragma: no cover from typing import Any, Callable from gitlab.v4.objects import Project as GitLabProject class Gitlab(RemoteHvcsBase): """Gitlab HVCS interface for interacting with Gitlab repositories""" DEFAULT_ENV_TOKEN_NAME = "GITLAB_TOKEN" # noqa: S105 # purposefully not CI_JOB_TOKEN as it is not a personal access token, # It is missing the permission to push to the repository, but has all others (releases, packages, etc.) 
    def __init__(
        self,
        remote_url: str,
        *,
        hvcs_domain: str | None = None,
        token: str | None = None,
        allow_insecure: bool = False,
        **_kwargs: Any,
    ) -> None:
        """
        Initialize the GitLab client wrapper for a repository.

        :param remote_url: The git remote url of the repository this client serves
        :param hvcs_domain: Optional explicit GitLab server URL; when omitted,
            falls back first to the ``CI_SERVER_URL`` environment variable and
            finally to the public ``gitlab.com`` domain
        :param token: Access token used for API calls and authenticated pushes
        :param allow_insecure: When True, permit a plain-http server URL
        """
        super().__init__(remote_url)
        self.token = token
        self.project_namespace = f"{self.owner}/{self.repo_name}"
        # Lazily-fetched python-gitlab project object (see the ``project`` property)
        self._project: GitLabProject | None = None

        domain_url = self._normalize_url(
            hvcs_domain
            or os.getenv("CI_SERVER_URL", "")
            or f"https://{self.DEFAULT_DOMAIN}",
            allow_insecure=allow_insecure,
        )

        # Strip any auth, query or fragment from the domain
        self._hvcs_domain = parse_url(
            Url(
                scheme=domain_url.scheme,
                host=domain_url.host,
                port=domain_url.port,
                path=str(PurePosixPath(domain_url.path or "/")),
            ).url.rstrip("/")
        )

        self._client = gitlab.Gitlab(self.hvcs_domain.url, private_token=self.token)
        # The API base is whatever the python-gitlab client derived from the domain
        self._api_url = parse_url(self._client.api_url)
    @logged_function(logger)
    @suppress_not_found
    def get_release_by_tag(self, tag: str) -> gitlab.v4.objects.ProjectRelease | None:
        """
        Get a release by its tag name.

        :param tag: The tag name to get the release for

        :return: gitlab.v4.objects.ProjectRelease or None if not found

        :raises: gitlab.exceptions.GitlabAuthenticationError: If the user is not authenticated
        """
        try:
            return self.project.releases.get(tag)
        except gitlab.exceptions.GitlabGetError:
            # A lookup failure simply means no release exists for this tag;
            # report "not found" rather than propagating the error
            logger.debug("Release %s not found", tag)
            return None
        except KeyError as err:
            # NOTE(review): it is unclear which call path raises KeyError here --
            # presumably a response payload missing an expected key; confirm
            raise UnexpectedResponse("JSON response is missing commit.id") from err
:param release: The release object to update :param release_notes: The new release notes :return: The release id :raises: GitlabAuthenticationError: If authentication is not correct :raises: GitlabUpdateError: If the server cannot perform the request """ logger.info( "Updating release %s [%s]", release.name, release.attributes.get("commit", {}).get("id"), ) release.description = release_notes release.save() return str(release.get_id()) @logged_function(logger) def create_or_update_release( self, tag: str, release_notes: str, prerelease: bool = False ) -> str: """ Create or update a release for the given tag in a remote VCS. :param tag: The tag to create or update the release for :param release_notes: The changelog description for this version only :param prerelease: This parameter has no effect in GitLab :return: The release id :raises ValueError: If the release could not be created or updated :raises gitlab.exceptions.GitlabAuthenticationError: If the user is not authenticated :raises GitlabUpdateError: If the server cannot perform the request """ try: return self.create_release( tag=tag, release_notes=release_notes, prerelease=prerelease ) except gitlab.GitlabCreateError: logger.info( "New release %s could not be created for project %s", tag, self.project_namespace, ) if (release_obj := self.get_release_by_tag(tag)) is None: raise ValueError( f"release for tag {tag} could not be found, and could not be created" ) logger.debug( "Found existing release commit %s, updating", release_obj.commit.get("id") ) # If this errors we let it die return self.edit_release_notes( release=release_obj, release_notes=release_notes, ) def remote_url(self, use_token: bool = True) -> str: """Get the remote url including the token for authentication if requested""" if not (self.token and use_token): return self._remote_url return self.create_server_url( auth=f"gitlab-ci-token:{self.token}", path=f"{self.project_namespace}.git", ) def compare_url(self, from_rev: str, to_rev: str) -> 
str: return self.create_repo_url(repo_path=f"/-/compare/{from_rev}...{to_rev}") def commit_hash_url(self, commit_hash: str) -> str: return self.create_repo_url(repo_path=f"/-/commit/{commit_hash}") def issue_url(self, issue_num: str | int) -> str: # Strips off any character prefix like '#' that usually exists if isinstance(issue_num, str) and ( match := regexp(r"(\d+)$").search(issue_num) ): try: issue_num = int(match.group(1)) except ValueError: return "" if isinstance(issue_num, int): return self.create_repo_url(repo_path=f"/-/issues/{issue_num}") return "" def merge_request_url(self, mr_number: str | int) -> str: # Strips off any character prefix like '!' that usually exists if isinstance(mr_number, str) and ( match := regexp(r"(\d+)$").search(mr_number) ): try: mr_number = int(match.group(1)) except ValueError: return "" if isinstance(mr_number, int): return self.create_repo_url(repo_path=f"/-/merge_requests/{mr_number}") return "" def pull_request_url(self, pr_number: str | int) -> str: return self.merge_request_url(mr_number=pr_number) def upload_dists(self, tag: str, dist_glob: str) -> int: return super().upload_dists(tag, dist_glob) def create_release_url(self, tag: str = "") -> str: tag_str = tag.strip() return self.create_repo_url(repo_path=f"/-/releases/{tag_str}") @staticmethod def format_w_official_vcs_name(format_str: str) -> str: if "%s" in format_str: return format_str % Gitlab.OFFICIAL_NAME if "{}" in format_str: return format_str.format(Gitlab.OFFICIAL_NAME) if "{vcs_name}" in format_str: return format_str.format(vcs_name=Gitlab.OFFICIAL_NAME) return format_str def get_changelog_context_filters(self) -> tuple[Callable[..., Any], ...]: return ( self.create_server_url, self.create_repo_url, self.commit_hash_url, self.compare_url, self.issue_url, self.merge_request_url, self.pull_request_url, self.create_release_url, self.format_w_official_vcs_name, ) RemoteHvcsBase.register(Gitlab) 
python-semantic-release-10.4.1/src/semantic_release/hvcs/remote_hvcs_base.py000066400000000000000000000142671506116242600272710ustar00rootroot00000000000000"""Common functionality and interface for interacting with Git remote VCS""" from __future__ import annotations from abc import ABCMeta, abstractmethod from pathlib import PurePosixPath from typing import TYPE_CHECKING from urllib3.util.url import Url, parse_url from semantic_release.hvcs import HvcsBase if TYPE_CHECKING: # pragma: no cover from typing import Any class RemoteHvcsBase(HvcsBase, metaclass=ABCMeta): """ Interface for subclasses interacting with a remote VCS This abstract class is defined to provide common helper functions and a set of basic methods that all remote VCS environments usually support. If the remote vcs implementation (via subclass) does not support a functionality then it can just call super()'s method which defaults as a non-supported log message and empty results. This is more straightforward than checking for NotImplemented around every function call in the core library code. 
""" DEFAULT_ENV_TOKEN_NAME = "HVCS_TOKEN" # noqa: S105 def __init__(self, remote_url: str, *_args: Any, **_kwargs: Any) -> None: super().__init__(remote_url) self._hvcs_domain: Url | None = None self._api_url: Url | None = None @property def hvcs_domain(self) -> Url: if self._hvcs_domain is None: raise RuntimeError("Property 'hvcs_domain' was used before it was set!") return self._hvcs_domain @property def api_url(self) -> Url: if self._api_url is None: raise RuntimeError("Property 'api_url' was used before it was set!") return self._api_url @abstractmethod def upload_dists(self, tag: str, dist_glob: str) -> int: """ Upload built distributions to a release on a remote VCS that supports such uploads """ self._not_supported(self.upload_dists.__name__) return 0 @abstractmethod def create_release( self, tag: str, release_notes: str, prerelease: bool = False, assets: list[str] | None = None, noop: bool = False, ) -> int | str: """ Create a release in a remote VCS, if supported Which includes uploading any assets as part of the release """ self._not_supported(self.create_release.__name__) return -1 @abstractmethod def create_or_update_release( self, tag: str, release_notes: str, prerelease: bool = False ) -> int | str: """ Create or update a release for the given tag in a remote VCS, attaching the given changelog, if supported """ self._not_supported(self.create_or_update_release.__name__) return -1 def create_server_url( self, path: str, auth: str | None = None, query: str | None = None, fragment: str | None = None, ) -> str: # Ensure any path prefix is transferred but not doubled up on the derived url normalized_path = ( f"{self.hvcs_domain.path}/{path}" if self.hvcs_domain.path and not path.startswith(self.hvcs_domain.path) else path ) return self._derive_url( self.hvcs_domain, path=normalized_path, auth=auth, query=query, fragment=fragment, ) def create_repo_url( self, repo_path: str, query: str | None = None, fragment: str | None = None, ) -> str: return 
self.create_server_url( path=f"/{self.owner}/{self.repo_name}/{repo_path}", query=query, fragment=fragment, ) def create_api_url( self, endpoint: str, auth: str | None = None, query: str | None = None, fragment: str | None = None, ) -> str: # Ensure any api path prefix is transferred but not doubled up on the derived api url normalized_endpoint = ( f"{self.api_url.path}/{endpoint}" if self.api_url.path and not endpoint.startswith(self.api_url.path) else endpoint ) return self._derive_url( self.api_url, path=normalized_endpoint, auth=auth, query=query, fragment=fragment, ) @staticmethod def _derive_url( base_url: Url, path: str, auth: str | None = None, query: str | None = None, fragment: str | None = None, ) -> str: overrides = dict( filter( lambda x: x[1] is not None, { "auth": auth, "path": str(PurePosixPath("/", path.lstrip("/"))), "query": query, "fragment": fragment, }.items(), ) ) return Url( **{ **base_url._asdict(), **overrides, } ).url.rstrip("/") @staticmethod def _validate_url_scheme(url: Url, allow_insecure: bool = False) -> None: if url.scheme == "http" and not allow_insecure: raise ValueError("Insecure connections are currently disabled.") if url.scheme not in ["http", "https"]: raise ValueError( f"Invalid scheme {url.scheme} for {url.host}. " "Only http and https are supported." 
class TokenAuth(AuthBase):
    """
    requests Authentication for token based authorization.

    Attaching an instance to a requests session adds an
    ``Authorization: token <token>`` header to each prepared request.
    """

    def __init__(self, token: str) -> None:
        # Stored verbatim; both equality checks and the emitted header use it
        self.token = token

    def __eq__(self, other: object) -> bool:
        # Equal to any object exposing a matching ``token`` attribute
        return getattr(other, "token", None) == self.token

    def __ne__(self, other: object) -> bool:
        return not self.__eq__(other)

    def __call__(self, req: PreparedRequest) -> PreparedRequest:
        req.headers["Authorization"] = f"token {self.token}"
        return req
def suppress_http_error_for_codes(
    *codes: int,
) -> Callable[[Callable[..., _R]], Callable[..., _R | None]]:
    """
    For the codes given, return a decorator that will suppress HTTPErrors that
    are raised from responses that came with one of those status codes.

    The wrapped function will return None instead of raising the HTTPError.

    :param codes: HTTP status codes whose HTTPErrors should be suppressed
    :return: a decorator converting matching HTTPErrors into a None result
    """

    def _suppress_http_error_for_codes(
        func: Callable[..., _R],
    ) -> Callable[..., _R | None]:
        @wraps(func)
        def _wrapper(*a: Any, **kw: Any) -> _R | None:
            try:
                return func(*a, **kw)
            except HTTPError as err:
                # Only errors carrying a response with a listed status code
                # are logged; the error is swallowed and None returned
                if err.response and err.response.status_code in codes:
                    logger.warning(
                        "%s received response %s: %s",
                        func.__qualname__,
                        err.response.status_code,
                        str(err),
                    )
                return None

        return _wrapper

    return _suppress_http_error_for_codes
semantic_release.version.algorithm import ( next_version, tags_and_versions, ) from semantic_release.version.translator import VersionTranslator from semantic_release.version.version import Version python-semantic-release-10.4.1/src/semantic_release/version/algorithm.py000066400000000000000000000375761506116242600265010ustar00rootroot00000000000000from __future__ import annotations import logging from contextlib import suppress from functools import reduce from queue import LifoQueue from typing import TYPE_CHECKING, Iterable from semantic_release.commit_parser import ParsedCommit from semantic_release.commit_parser.token import ParseError from semantic_release.const import DEFAULT_VERSION from semantic_release.enums import LevelBump, SemanticReleaseLogLevels from semantic_release.errors import InternalError, InvalidVersion from semantic_release.globals import logger from semantic_release.helpers import validate_types_in_sequence if TYPE_CHECKING: # pragma: no cover from typing import Sequence from git.objects.commit import Commit from git.refs.tag import Tag from git.repo.base import Repo from semantic_release.commit_parser import ( CommitParser, ParseResult, ParserOptions, ) from semantic_release.version.translator import VersionTranslator from semantic_release.version.version import Version def tags_and_versions( tags: Iterable[Tag], translator: VersionTranslator ) -> list[tuple[Tag, Version]]: """ Return a list of 2-tuples, where each element is a tuple (tag, version) from the tags in the Git repo and their corresponding `Version` according to `Version.from_tag`. The returned list is sorted according to semver ordering rules. Tags which are not matched by `translator` are ignored. 
""" ts_and_vs: list[tuple[Tag, Version]] = [] for tag in tags: try: version = translator.from_tag(tag.name) except (NotImplementedError, InvalidVersion) as e: logger.warning( "Couldn't parse tag %s as as Version: %s", tag.name, str(e), exc_info=logger.isEnabledFor(logging.DEBUG), ) continue if version: ts_and_vs.append((tag, version)) logger.info("found %s previous tags", len(ts_and_vs)) return sorted(ts_and_vs, reverse=True, key=lambda v: v[1]) def _traverse_graph_for_commits( head_commit: Commit, latest_release_tag_str: str = "", ) -> Sequence[Commit]: # Depth-first search def dfs(start_commit: Commit, stop_nodes: set[Commit]) -> Sequence[Commit]: # Create a stack for DFS stack: LifoQueue[Commit] = LifoQueue() # Create a set to store visited graph nodes (commit objects in this case) visited: set[Commit] = set() # Initialize the result commits: list[Commit] = [] # Add the source node in the queue to start the search stack.put(start_commit) # Traverse the git history capturing each commit found before it reaches a stop node while not stack.empty(): if (node := stack.get()) in visited or node in stop_nodes: continue visited.add(node) commits.append(node) # Add all parent commits to the stack from left to right so that the rightmost is popped first # as the left side is generally the merged into branch for parent in node.parents: stack.put(parent) return commits # Run a Depth First Search to find all the commits since the last release return dfs( start_commit=head_commit, stop_nodes=set( head_commit.repo.iter_commits(latest_release_tag_str) if latest_release_tag_str else [] ), ) def _increment_version( latest_version: Version, latest_full_version: Version, level_bump: LevelBump, prerelease: bool, prerelease_token: str, major_on_zero: bool, allow_zero_version: bool, ) -> Version: """ Using the given versions, along with a given `level_bump`, increment to the next version according to whether or not this is a prerelease. 
`latest_version` is the most recent version released from this branch's history. `latest_full_version`, the most recent full release (i.e. not a prerelease) in this branch's history. `latest_version` and `latest_full_version` can be the same, but aren't necessarily. """ local_vars = list(locals().items()) logger.log( SemanticReleaseLogLevels.SILLY, "_increment_version: %s", str.join(", ", [f"{k} = {v}" for k, v in local_vars]), ) # Handle variations where the latest version is 0.x.x if latest_version.major == 0: if not allow_zero_version: # Set up default version to be 1.0.0 if currently 0.x.x which means a commented # breaking change is not required to bump to 1.0.0 logger.debug( "Bumping major version as 0.x.x versions are disabled because of allow_zero_version=False" ) level_bump = LevelBump.MAJOR elif not major_on_zero: # if we are a 0.x.y release and have set `major_on_zero`, # breaking changes should increment the minor digit # Correspondingly, we reduce the level that we increment the # version by. logger.debug( "reducing version increment due to 0. 
version and major_on_zero=False" ) level_bump = min(level_bump, LevelBump.MINOR) logger.debug( "prerelease=%s and the latest version %s %s prerelease", prerelease, latest_version, "is a" if latest_version.is_prerelease else "is not a", ) if level_bump == LevelBump.NO_RELEASE: raise ValueError("level_bump must be at least PRERELEASE_REVISION") if level_bump == LevelBump.PRERELEASE_REVISION and not latest_version.is_prerelease: raise ValueError( "Cannot increment a non-prerelease version with a prerelease level bump" ) # assume we always want to increment the version that is the latest in the branch's history base_version = latest_version # if the current version is a prerelease & we want a new prerelease, then # figure out if we need to bump the prerelease revision or start a new prerelease if latest_version.is_prerelease: # find the change since the last full release because if the current version is a prerelease # then we need to predict properly the next full version diff_with_last_released_version = latest_version - latest_full_version logger.debug( "the diff b/w the latest version '%s' and the latest full release version '%s' is: %s", latest_version, latest_full_version, diff_with_last_released_version, ) # Since the difference is less than or equal to the level bump and we want a new prerelease, # we can abort early and just increment the revision if level_bump <= diff_with_last_released_version: # 6a ii) if level_bump <= the level bump introduced by the previous tag (latest_version) if prerelease: logger.debug( "there has already been at least a %s release since the last full release %s", level_bump, latest_full_version, ) logger.debug("Incrementing the prerelease revision...") new_revision = base_version.to_prerelease( token=prerelease_token, revision=( 1 if latest_version.prerelease_token != prerelease_token else (latest_version.prerelease_revision or 0) + 1 ), ) logger.debug("Incremented %s to %s", base_version, new_revision) return new_revision # When we 
don't want a prerelease, but the previous version is a prerelease that # had a greater bump than we currently are applying, choose the larger bump instead # as it consumes this bump logger.debug("Finalizing the prerelease version...") return base_version.finalize_version() # Fallthrough to handle all larger level bumps logger.debug( "this release has a greater bump than any change since the last full release, %s", latest_full_version, ) # Fallthrough, if we don't want a prerelease, or if we do but the level bump is greater # # because the current version is a prerelease, we must start from the last full version # Case 1: we identified that the level bump is greater than the change since # the last full release, this will also reset the prerelease revision # Case 2: we don't want a prerelease, so consider only the last full version in history base_version = latest_full_version # From the base version, we can now increment the version according to the level bump # regardless of the prerelease status as bump() handles the reset and pass through logger.debug("Bumping %s with a %s bump", base_version, level_bump) target_next_version = base_version.bump(level_bump) # Converting to/from a prerelease if necessary target_next_version = ( target_next_version.to_prerelease(token=prerelease_token) if prerelease else target_next_version.finalize_version() ) logger.debug("Incremented %s to %s", base_version, target_next_version) return target_next_version def next_version( repo: Repo, translator: VersionTranslator, commit_parser: CommitParser[ParseResult, ParserOptions], allow_zero_version: bool, major_on_zero: bool, prerelease: bool = False, ) -> Version: """ Evaluate the history within `repo`, and based on the tags and commits in the repo history, identify the next semantic version that should be applied to a release """ # Default initial version # Since the translator is configured by the user, we can't guarantee that it will # be able to parse the default version. 
So we first cast it to a tag using the default # value and the users configured tag format, then parse it back to a version object default_initial_version = translator.from_tag( translator.str_to_tag(DEFAULT_VERSION) ) if default_initial_version is None: # This should never happen, but if it does, it's a bug raise InternalError( "Translator was unable to parse the embedded default version" ) # Step 1. All tags, sorted descending by semver ordering rules all_git_tags_as_versions = tags_and_versions(repo.tags, translator) # Retrieve all commit hashes (regardless of merges) in the current branch's history from repo origin commit_hash_set = { commit.hexsha for commit in _traverse_graph_for_commits(head_commit=repo.active_branch.commit) } # Filter all releases that are not found in the current branch's history historic_versions: list[Version] = [] for tag, version in all_git_tags_as_versions: # TODO: move this to tags_and_versions() function? # Ignore the error that is raised when tag points to a Blob or Tree object rather # than a commit object (tags that point to tags that then point to commits are resolved automatically) with suppress(ValueError): if tag.commit.hexsha in commit_hash_set: historic_versions.append(version) # Step 2. Get the latest final release version in the history of the current branch # or fallback to the default 0.0.0 starting version value if none are found latest_full_release_version = next( filter( lambda version: not version.is_prerelease, historic_versions, ), default_initial_version, ) logger.info( f"The last full version in this branch's history was {latest_full_release_version}" if latest_full_release_version != default_initial_version else "No full releases found in this branch's history" ) # Step 3. 
Determine the latest release version in the history of the current branch # If we the desired result is a prerelease, we must determine if there was any previous # prerelease in the history of the current branch beyond the latest_full_release_version. # Important to note that, we only consider prereleases that are of the same prerelease token # as the basis of incrementing the prerelease revision. # If we are not looking for a prerelease, this is the same as the last full release. latest_version = ( latest_full_release_version if not prerelease else next( filter( lambda version: all( [ version.is_prerelease, version.prerelease_token == translator.prerelease_token, version >= latest_full_release_version, ] ), historic_versions, ), latest_full_release_version, # default ) ) logger.info("The latest release in this branch's history was %s", latest_version) # Step 4. Walk the git tree to find all commits that have been made since the last release commits_since_last_release = _traverse_graph_for_commits( head_commit=repo.active_branch.commit, latest_release_tag_str=( # NOTE: the default_initial_version should not actually exist on the repository (ie v0.0.0) # so we provide an empty tag string when there are no tags on the repository yet latest_version.as_tag() if latest_version != default_initial_version else "" ), ) logger.info( f"Found {len(commits_since_last_release)} commits since the last release!" if len(commits_since_last_release) > 0 else "No commits found since the last release!" ) # Step 5. apply the parser to each commit in the history (could return multiple results per commit) parsed_results = list(map(commit_parser.parse, commits_since_last_release)) # Step 5A. 
Accumulate all parsed results into a single list accounting for possible multiple results per commit consolidated_results: list[ParseResult] = reduce( lambda accumulated_results, p_results: [ *accumulated_results, *( # Cast to list if not already a list p_results if isinstance(p_results, list) or type(p_results) == tuple else [p_results] ), ], parsed_results, [], ) # Step 5B. Validation type check for the parser results (important because of possible custom parsers) if not validate_types_in_sequence(consolidated_results, (ParseError, ParsedCommit)): raise TypeError("Unexpected type returned from commit_parser.parse") # Step 5C. Parse the commits to determine the bump level that should be applied parsed_levels: set[LevelBump] = { parsed_result.bump # type: ignore[union-attr] # too complex for type checkers for parsed_result in filter( # Filter out any non-ParsedCommit results (i.e. ParseErrors) lambda parsed_result: isinstance(parsed_result, ParsedCommit), consolidated_results, ) } logger.debug( "parsed the following distinct levels from the commits since the last release: %s", parsed_levels, ) level_bump = max(parsed_levels, default=LevelBump.NO_RELEASE) logger.info("The type of the next release release is: %s", level_bump) if all( [ level_bump is LevelBump.NO_RELEASE, latest_version.major != 0 or allow_zero_version, ] ): logger.info("No release will be made") return latest_version return _increment_version( latest_version=latest_version, latest_full_version=latest_full_release_version, level_bump=level_bump, prerelease=prerelease, prerelease_token=translator.prerelease_token, major_on_zero=major_on_zero, allow_zero_version=allow_zero_version, ) python-semantic-release-10.4.1/src/semantic_release/version/declaration.py000066400000000000000000000070631506116242600267640ustar00rootroot00000000000000from __future__ import annotations # TODO: Remove v11 from abc import ABC, abstractmethod from pathlib import Path from typing import TYPE_CHECKING from deprecated.sphinx 
import deprecated from semantic_release.globals import logger from semantic_release.version.declarations.enum import VersionStampType from semantic_release.version.declarations.i_version_replacer import IVersionReplacer from semantic_release.version.declarations.pattern import PatternVersionDeclaration from semantic_release.version.declarations.toml import TomlVersionDeclaration if TYPE_CHECKING: # pragma: no cover from semantic_release.version.version import Version # Globals __all__ = [ "IVersionReplacer", "VersionStampType", "PatternVersionDeclaration", "TomlVersionDeclaration", "VersionDeclarationABC", ] @deprecated( version="9.20.0", reason=str.join( " ", [ "Refactored to composition paradigm using the new IVersionReplacer interface.", "This class will be removed in a future release", ], ), ) class VersionDeclarationABC(ABC): """ ABC for classes representing a location in which a version is declared somewhere within the source tree of the repository """ def __init__(self, path: Path | str, search_text: str) -> None: self.path = Path(path) if not self.path.exists(): raise FileNotFoundError(f"path {self.path.resolve()!r} does not exist") self.search_text = search_text self._content: str | None = None @property def content(self) -> str: """ The content of the source file in which the version is stored. This property is cached in the instance variable _content """ if self._content is None: logger.debug( "No content stored, reading from source file %s", self.path.resolve() ) self._content = self.path.read_text() return self._content @content.deleter def content(self) -> None: logger.debug("resetting instance-stored source file contents") self._content = None @abstractmethod def parse(self) -> set[Version]: """ Return a set of the versions which can be parsed from the file. Because a source can match in multiple places, this method returns a set of matches. Generally, there should only be one element in this set (i.e. 
even if the version is specified in multiple places, it should be the same version in each place), but enforcing that condition is not mandatory or expected. """ @abstractmethod def replace(self, new_version: Version) -> str: """ Update the versions. This method reads the underlying file, replaces each occurrence of the matched pattern, then writes the updated file. :param new_version: The new version number as a `Version` instance """ def write(self, content: str) -> None: r""" Write new content back to the source path. Use alongside .replace(): >>> class MyVD(VersionDeclarationABC): ... def parse(self): ... ... def replace(self, new_version: Version): ... ... def write(self, content: str): ... >>> new_version = Version.parse("1.2.3") >>> vd = MyVD("path", r"__version__ = (?P\d+\d+\d+)") >>> vd.write(vd.replace(new_version)) """ logger.debug("writing content to %r", self.path.resolve()) self.path.write_text(content) self._content = None python-semantic-release-10.4.1/src/semantic_release/version/declarations/000077500000000000000000000000001506116242600265675ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/version/declarations/__init__.py000066400000000000000000000000001506116242600306660ustar00rootroot00000000000000python-semantic-release-10.4.1/src/semantic_release/version/declarations/enum.py000066400000000000000000000003641506116242600301100ustar00rootroot00000000000000from __future__ import annotations from enum import Enum class VersionStampType(str, Enum): """Enum for the type of version declaration""" # The version is a number format, e.g. 
1.2.3 NUMBER_FORMAT = "nf" TAG_FORMAT = "tf" python-semantic-release-10.4.1/src/semantic_release/version/declarations/i_version_replacer.py000066400000000000000000000045601506116242600330200ustar00rootroot00000000000000from __future__ import annotations from abc import ABCMeta, abstractmethod from typing import TYPE_CHECKING if TYPE_CHECKING: # pragma: no cover from pathlib import Path from semantic_release.version.version import Version class IVersionReplacer(metaclass=ABCMeta): """ Interface for subclasses that replace a version string in a source file. Methods generally have a base implementation are implemented here but likely just provide a not-supported message but return gracefully This class cannot be instantiated directly but must be inherited from and implement the designated abstract methods. """ @classmethod def __subclasshook__(cls, subclass: type) -> bool: # Validate that the subclass implements all of the abstract methods. # This supports isinstance and issubclass checks. return bool( cls is IVersionReplacer and all( bool(hasattr(subclass, method) and callable(getattr(subclass, method))) for method in IVersionReplacer.__abstractmethods__ ) ) @abstractmethod def parse(self) -> set[Version]: """ Return a set of the versions which can be parsed from the file. Because a source can match in multiple places, this method returns a set of matches. Generally, there should only be one element in this set (i.e. even if the version is specified in multiple places, it should be the same version in each place), but enforcing that condition is not mandatory or expected. """ raise NotImplementedError # pragma: no cover @abstractmethod def replace(self, new_version: Version) -> str: """ Replace the version in the source content with `new_version`, and return the updated content. 
:param new_version: The new version number as a `Version` instance """ raise NotImplementedError # pragma: no cover @abstractmethod def update_file_w_version( self, new_version: Version, noop: bool = False ) -> Path | None: """ This method reads the underlying file, replaces each occurrence of the matched pattern, then writes the updated file. :param new_version: The new version number as a `Version` instance """ raise NotImplementedError # pragma: no cover python-semantic-release-10.4.1/src/semantic_release/version/declarations/pattern.py000066400000000000000000000201641506116242600306210ustar00rootroot00000000000000from __future__ import annotations from pathlib import Path from re import ( MULTILINE, compile as regexp, error as RegExpError, # noqa: N812 escape as regex_escape, ) from typing import TYPE_CHECKING from deprecated.sphinx import deprecated from semantic_release.cli.util import noop_report from semantic_release.const import SEMVER_REGEX from semantic_release.globals import logger from semantic_release.version.declarations.enum import VersionStampType from semantic_release.version.declarations.i_version_replacer import IVersionReplacer from semantic_release.version.version import Version if TYPE_CHECKING: # pragma: no cover from re import Match class VersionSwapper: """Callable to replace a version number in a string with a new version number.""" def __init__(self, new_version_str: str, group_match_name: str) -> None: self.version_str = new_version_str self.group_match_name = group_match_name def __call__(self, match: Match[str]) -> str: i, j = match.span() ii, jj = match.span(self.group_match_name) return f"{match.string[i:ii]}{self.version_str}{match.string[jj:j]}" class PatternVersionDeclaration(IVersionReplacer): """ VersionDeclarationABC implementation representing a version number in a particular file. The version number is identified by a regular expression, which should be provided in `search_text`. 
""" _VERSION_GROUP_NAME = "version" def __init__( self, path: Path | str, search_text: str, stamp_format: VersionStampType ) -> None: self._content: str | None = None self._path = Path(path).resolve() self._stamp_format = stamp_format try: self._search_pattern = regexp(search_text, flags=MULTILINE) except RegExpError as err: raise ValueError( f"Invalid regular expression for search text: {search_text!r}" ) from err if self._VERSION_GROUP_NAME not in self._search_pattern.groupindex: raise ValueError( str.join( " ", [ f"Invalid search text {search_text!r}; must use", f"'{self._VERSION_GROUP_NAME}' as a named group, for example", f"(?P<{self._VERSION_GROUP_NAME}>...) . For more info on named", "groups see https://docs.python.org/3/library/re.html", ], ) ) @property def content(self) -> str: """A cached property that stores the content of the configured source file.""" if self._content is None: logger.debug("No content stored, reading from source file %s", self._path) if not self._path.exists(): raise FileNotFoundError(f"path {self._path!r} does not exist") self._content = self._path.read_text() return self._content @content.deleter def content(self) -> None: self._content = None @deprecated( version="9.20.0", reason="Function is unused and will be removed in a future release", ) def parse(self) -> set[Version]: # pragma: no cover """ Return the versions matching this pattern. Because a pattern can match in multiple places, this method returns a set of matches. Generally, there should only be one element in this set (i.e. even if the version is specified in multiple places, it should be the same version in each place), but it falls on the caller to check for this condition. 
""" versions = { Version.parse(m.group(self._VERSION_GROUP_NAME)) for m in self._search_pattern.finditer(self.content) } logger.debug( "Parsing current version: path=%r pattern=%r num_matches=%s", self._path.resolve(), self._search_pattern, len(versions), ) return versions def replace(self, new_version: Version) -> str: """ Replace the version in the source content with `new_version`, and return the updated content. :param new_version: The new version number as a `Version` instance """ new_content, n_matches = self._search_pattern.subn( VersionSwapper( new_version_str=( new_version.as_tag() if self._stamp_format == VersionStampType.TAG_FORMAT else str(new_version) ), group_match_name=self._VERSION_GROUP_NAME, ), self.content, ) logger.debug( "path=%r pattern=%r num_matches=%r", self._path, self._search_pattern, n_matches, ) return new_content def update_file_w_version( self, new_version: Version, noop: bool = False ) -> Path | None: if noop: if not self._path.exists(): noop_report( f"FILE NOT FOUND: cannot stamp version in non-existent file {self._path}", ) return None if len(self._search_pattern.findall(self.content)) < 1: noop_report( f"VERSION PATTERN NOT FOUND: no version to stamp in file {self._path}", ) return None return self._path new_content = self.replace(new_version) if new_content == self.content: return None self._path.write_text(new_content) del self.content return self._path @classmethod def from_string_definition( cls, replacement_def: str, tag_format: str ) -> PatternVersionDeclaration: """ create an instance of self from a string representing one item of the "version_variables" list in the configuration """ parts = replacement_def.split(":", maxsplit=2) if len(parts) <= 1: raise ValueError( f"Invalid replacement definition {replacement_def!r}, missing ':'" ) if len(parts) == 2: # apply default version_type of "number_format" (ie. 
"1.2.3") parts = [*parts, VersionStampType.NUMBER_FORMAT.value] path, variable, version_type = parts try: stamp_type = VersionStampType(version_type) except ValueError as err: raise ValueError( str.join( " ", [ "Invalid stamp type, must be one of:", str.join(", ", [e.value for e in VersionStampType]), ], ) ) from err # DEFAULT: naked (no v-prefixed) semver version value_replace_pattern_str = ( f"(?P<{cls._VERSION_GROUP_NAME}>{SEMVER_REGEX.pattern})" ) if version_type == VersionStampType.TAG_FORMAT.value: tag_parts = tag_format.strip().split(r"{version}", maxsplit=1) value_replace_pattern_str = str.join( "", [ f"(?P<{cls._VERSION_GROUP_NAME}>", regex_escape(tag_parts[0]), SEMVER_REGEX.pattern, (regex_escape(tag_parts[1]) if len(tag_parts) > 1 else ""), ")", ], ) search_text = str.join( "", [ # Supports optional matching quotations around variable name # Negative lookbehind to ensure we don't match part of a variable name f"""(?x)(?P['"])?(?['"])?{value_replace_pattern_str}(?P=quote2)?""", ], ) return cls(path, search_text, stamp_type) python-semantic-release-10.4.1/src/semantic_release/version/declarations/toml.py000066400000000000000000000115031506116242600301140ustar00rootroot00000000000000from __future__ import annotations from pathlib import Path from typing import Any, Dict, cast import tomlkit from deprecated.sphinx import deprecated from dotty_dict import Dotty from semantic_release.cli.util import noop_report from semantic_release.globals import logger from semantic_release.version.declarations.enum import VersionStampType from semantic_release.version.declarations.i_version_replacer import IVersionReplacer from semantic_release.version.version import Version class TomlVersionDeclaration(IVersionReplacer): def __init__( self, path: Path | str, search_text: str, stamp_format: VersionStampType ) -> None: self._content: str | None = None self._path = Path(path).resolve() self._stamp_format = stamp_format self._search_text = search_text @property def 
content(self) -> str: """A cached property that stores the content of the configured source file.""" if self._content is None: logger.debug("No content stored, reading from source file %s", self._path) if not self._path.exists(): raise FileNotFoundError(f"path {self._path!r} does not exist") self._content = self._path.read_text() return self._content @content.deleter def content(self) -> None: self._content = None @deprecated( version="9.20.0", reason="Function is unused and will be removed in a future release", ) def parse(self) -> set[Version]: # pragma: no cover """Look for the version in the source content""" content = self._load() maybe_version: str = content.get(self._search_text) # type: ignore[return-value] if maybe_version is not None: logger.debug( "Found a key %r that looks like a version (%r)", self._search_text, maybe_version, ) valid_version = Version.parse(maybe_version) return {valid_version} if valid_version else set() # Maybe in future raise error if not found? return set() def replace(self, new_version: Version) -> str: """ Replace the version in the source content with `new_version`, and return the updated content. 
""" content = self._load() if self._search_text in content: logger.info( "found %r in source file contents, replacing with %s", self._search_text, new_version, ) content[self._search_text] = ( new_version.as_tag() if self._stamp_format == VersionStampType.TAG_FORMAT else str(new_version) ) return tomlkit.dumps(cast(Dict[str, Any], content)) def _load(self) -> Dotty: """Load the content of the source file into a Dotty for easier searching""" return Dotty(tomlkit.loads(self.content)) def update_file_w_version( self, new_version: Version, noop: bool = False ) -> Path | None: if noop: if not self._path.exists(): noop_report( f"FILE NOT FOUND: cannot stamp version in non-existent file {self._path!r}", ) return None if self._search_text not in self._load(): noop_report( f"VERSION PATTERN NOT FOUND: no version to stamp in file {self._path!r}", ) return None return self._path new_content = self.replace(new_version) if new_content == self.content: return None self._path.write_text(new_content) del self.content return self._path @classmethod def from_string_definition(cls, replacement_def: str) -> TomlVersionDeclaration: """ create an instance of self from a string representing one item of the "version_toml" list in the configuration """ parts = replacement_def.split(":", maxsplit=2) if len(parts) <= 1: raise ValueError( f"Invalid TOML replacement definition {replacement_def!r}, missing ':'" ) if len(parts) == 2: # apply default version_type of "number_format" (ie. 
"1.2.3") parts = [*parts, VersionStampType.NUMBER_FORMAT.value] path, search_text, version_type = parts try: stamp_type = VersionStampType(version_type) except ValueError as err: raise ValueError( str.join( " ", [ "Invalid stamp type, must be one of:", str.join(", ", [e.value for e in VersionStampType]), ], ) ) from err return cls(path, search_text, stamp_type) python-semantic-release-10.4.1/src/semantic_release/version/translator.py000066400000000000000000000057471506116242600266770ustar00rootroot00000000000000from __future__ import annotations import re from semantic_release.const import SEMVER_REGEX from semantic_release.globals import logger from semantic_release.helpers import check_tag_format from semantic_release.version.version import Version class VersionTranslator: """ Class to handle translation from Git tags into their corresponding Version instances. """ _VERSION_REGEX = SEMVER_REGEX @classmethod def _invert_tag_format_to_re(cls, tag_format: str) -> re.Pattern[str]: r""" Unpick the "tag_format" format string and create a regex which can be used to convert a tag to a version string. 
The following relationship should always hold true: >>> version = "1.2.3-anything.1+at_all.1234" # doesn't matter >>> tag_format = "v-anything_{version}_at-all" # doesn't matter >>> inverted_format = VersionTranslator._invert_tag_format_to_re(tag_format) >>> tag = tag_format.format(version=version) >>> m = inverted_format.match(tag) >>> assert m is not None >>> assert m.expand(r"\g") == version """ pat = re.compile( tag_format.replace(r"{version}", r"(?P.*)"), flags=re.VERBOSE, ) logger.debug("inverted tag_format %r to %r", tag_format, pat.pattern) return pat def __init__( self, tag_format: str = "v{version}", prerelease_token: str = "rc", # noqa: S107 ) -> None: check_tag_format(tag_format) self.tag_format = tag_format self.prerelease_token = prerelease_token self.from_tag_re = self._invert_tag_format_to_re(self.tag_format) def from_string(self, version_str: str) -> Version: """ Return a Version instance from a string. Delegates directly to Version.parse, using the translator's own stored values for tag_format and prerelease """ return Version.parse( version_str, tag_format=self.tag_format, prerelease_token=self.prerelease_token, ) def from_tag(self, tag: str) -> Version | None: """ Return a Version instance from a Git tag, if tag_format matches the format which would have generated the tag from a version. Otherwise return None. For example, a tag of 'v1.2.3' should be matched if `tag_format = 'v{version}`, but not if `tag_format = staging--v{version}`. 
""" tag_match = self.from_tag_re.match(tag) if not tag_match: return None raw_version_str = tag_match.group("version") return self.from_string(raw_version_str) def str_to_tag(self, version_str: str) -> str: """Formats a version string into a tag name""" return self.tag_format.format(version=version_str) def __repr__(self) -> str: return ( f"{type(self).__qualname__}(tag_format={self.tag_format}, " f"prerelease_token={self.prerelease_token})" ) python-semantic-release-10.4.1/src/semantic_release/version/version.py000066400000000000000000000335471506116242600261720ustar00rootroot00000000000000from __future__ import annotations import re from functools import wraps from itertools import zip_longest from typing import Callable, Union, overload from semantic_release.const import SEMVER_REGEX from semantic_release.enums import LevelBump from semantic_release.errors import InvalidVersion from semantic_release.globals import logger from semantic_release.helpers import check_tag_format # Very heavily inspired by semver.version:_comparator, I don't think there's # a cleaner way to do this # https://github.com/python-semver/python-semver/blob/b5317af9a7e99e6a86df98320e73be72d5adf0de/src/semver/version.py#L32 VersionComparable = Union["Version", str] VersionComparator = Callable[["Version", "Version"], bool] @overload def _comparator( *, type_guard: bool, ) -> Callable[[VersionComparator], VersionComparator]: ... @overload def _comparator( method: VersionComparator, *, type_guard: bool = True ) -> VersionComparator: ... def _comparator( method: VersionComparator | None = None, *, type_guard: bool = True ) -> VersionComparator | Callable[[VersionComparator], VersionComparator]: """ wrap a `Version` binop method to guard types and try to parse strings into Versions. use `type_guard = False` for `__eq__` and `__neq__` to make them return False if the wrong type is used, instead of erroring. 
""" if method is None: return lambda method: _comparator(method, type_guard=type_guard) @wraps(method) def _wrapper(self: Version, other: VersionComparable) -> bool: if not isinstance(other, (str, Version)): return False if not type_guard else NotImplemented if isinstance(other, str): try: other_v = self.parse( other, tag_format=self.tag_format, prerelease_token=self.prerelease_token, ) except InvalidVersion as ex: raise TypeError(str(ex)) from ex else: other_v = other return method(self, other_v) # type: ignore[misc] return _wrapper class Version: _VERSION_REGEX = SEMVER_REGEX def __init__( self, major: int, minor: int, patch: int, *, prerelease_token: str = "rc", # noqa: S107 prerelease_revision: int | None = None, build_metadata: str = "", tag_format: str = "v{version}", ) -> None: self.major = major self.minor = minor self.patch = patch self.prerelease_token = prerelease_token self.prerelease_revision = prerelease_revision self.build_metadata = build_metadata self._tag_format = tag_format @property def tag_format(self) -> str: return self._tag_format @tag_format.setter def tag_format(self, new_format: str) -> None: check_tag_format(new_format) self._tag_format = new_format # Maybe cache? @classmethod def parse( cls, version_str: str, tag_format: str = "v{version}", prerelease_token: str = "rc", # noqa: S107 ) -> Version: """ Parse version string to a Version instance. Inspired by `semver.version:VersionInfo.parse`, this implementation doesn't allow optional minor and patch versions. :param prerelease_token: will be ignored if the version string is a prerelease, the parsed token from `version_str` will be used instead. 
""" if not isinstance(version_str, str): raise InvalidVersion(f"{version_str!r} cannot be parsed as a Version") logger.debug("attempting to parse string %r as Version", version_str) match = cls._VERSION_REGEX.fullmatch(version_str) if not match: raise InvalidVersion(f"{version_str!r} is not a valid Version") prerelease = match.group("prerelease") if prerelease: pm = re.match(r"(?P[a-zA-Z0-9-\.]+)\.(?P\d+)", prerelease) if not pm: raise NotImplementedError( f"{cls.__qualname__} currently supports only prereleases " r"of the format (-([a-zA-Z0-9-])\.\(\d+)), for example " r"'1.2.3-my-custom-3rc.4'." ) prerelease_token, prerelease_revision = pm.groups() logger.debug( "parsed prerelease_token %s, prerelease_revision %s from version " "string %s", prerelease_token, prerelease_revision, version_str, ) else: prerelease_revision = None logger.debug("version string %s parsed as a non-prerelease", version_str) build_metadata = match.group("buildmetadata") or "" logger.debug( "parsed build metadata %r from version string %s", build_metadata, version_str, ) return Version( int(match.group("major")), int(match.group("minor")), int(match.group("patch")), prerelease_token=prerelease_token, prerelease_revision=( int(prerelease_revision) if prerelease_revision else None ), build_metadata=build_metadata, tag_format=tag_format, ) @property def is_prerelease(self) -> bool: return self.prerelease_revision is not None def __str__(self) -> str: full = f"{self.major}.{self.minor}.{self.patch}" prerelease = ( f"-{self.prerelease_token}.{self.prerelease_revision}" if self.prerelease_revision else "" ) build_metadata = f"+{self.build_metadata}" if self.build_metadata else "" return f"{full}{prerelease}{build_metadata}" def __repr__(self) -> str: prerelease_token_repr = ( repr(self.prerelease_token) if self.prerelease_token is not None else None ) prerelease_revision_repr = ( repr(self.prerelease_revision) if self.prerelease_revision is not None else None ) build_metadata_repr = ( 
repr(self.build_metadata) if self.build_metadata is not None else None ) return ( f"{type(self).__qualname__}(" + ", ".join( ( f"major={self.major}", f"minor={self.minor}", f"patch={self.patch}", f"prerelease_token={prerelease_token_repr}", f"prerelease_revision={prerelease_revision_repr}", f"build_metadata={build_metadata_repr}", f"tag_format={self.tag_format!r}", ) ) + ")" ) def as_tag(self) -> str: return self.tag_format.format(version=str(self)) def as_semver_tag(self) -> str: return f"v{self!s}" def bump(self, level: LevelBump) -> Version: """ Return a new Version instance according to the level specified to bump. Note this will intentionally drop the build metadata - that should be added elsewhere for the specific build producing this version. """ if type(level) != LevelBump: raise TypeError(f"Unexpected level {level!r}: expected {LevelBump!r}") logger.debug("performing a %s level bump", level) if level is LevelBump.MAJOR: return Version( self.major + 1, 0, 0, prerelease_token=self.prerelease_token, prerelease_revision=1 if self.is_prerelease else None, tag_format=self.tag_format, ) if level is LevelBump.MINOR: return Version( self.major, self.minor + 1, 0, prerelease_token=self.prerelease_token, prerelease_revision=1 if self.is_prerelease else None, tag_format=self.tag_format, ) if level is LevelBump.PATCH: return Version( self.major, self.minor, self.patch + 1, prerelease_token=self.prerelease_token, prerelease_revision=1 if self.is_prerelease else None, tag_format=self.tag_format, ) if level is LevelBump.PRERELEASE_REVISION: return Version( self.major, self.minor, self.patch, prerelease_token=self.prerelease_token, prerelease_revision=1 if not self.is_prerelease else (self.prerelease_revision or 0) + 1, tag_format=self.tag_format, ) # for consistency, this creates a new instance regardless # only other option is level is LevelBump.NO_RELEASE return Version( self.major, self.minor, self.patch, prerelease_token=self.prerelease_token, 
prerelease_revision=self.prerelease_revision, tag_format=self.tag_format, ) # Enables Version + LevelBump. __add__ = bump def __hash__(self) -> int: # If we use str(self) we don't capture tag_format, so another # instance with a tag_format "special_{version}_format" would # collide with an instance using "v{version}"/other format return hash(self.__repr__()) @_comparator(type_guard=False) def __eq__(self, other: Version) -> bool: # type: ignore[override] # https://semver.org/#spec-item-11 - # build metadata is not used for comparison return all( getattr(self, attr) == getattr(other, attr) for attr in ( "major", "minor", "patch", "prerelease_token", "prerelease_revision", ) ) @_comparator(type_guard=False) def __neq__(self, other: Version) -> bool: return not self.__eq__(other) # mypy wants to compare signature types with __lt__, # but can't because of the decorator @_comparator def __gt__(self, other: Version) -> bool: # type: ignore[has-type] # https://semver.org/#spec-item-11 - # build metadata is not used for comparison # Note we only support the following versioning currently, which # is a subset of the full spec: # (\d+\.\d+\.\d+)(-\w+\.\d+)?(\+.*)? if self.major != other.major: return self.major > other.major if self.minor != other.minor: return self.minor > other.minor if self.patch != other.patch: return self.patch > other.patch # If just one is a prerelease, then self > other if other is the prerelease # If neither are prereleases then they're equal (so return False) if not (self.is_prerelease and other.is_prerelease): return other.is_prerelease # If both are prereleases... # According to the semver spec 11.4 there are many other rules for # comparing precedence of pre-release versions. 
Here we just compare # the prerelease tokens, and their revision numbers if self.prerelease_token != other.prerelease_token: for self_tk, other_tk in zip_longest( self.prerelease_token.split("."), other.prerelease_token.split("."), fillvalue=None, ): if self_tk == other_tk: continue if (self_tk is None) ^ (other_tk is None): # Longest token (i.e. non-None) is greater return other_tk is None # Lexical sort, e.g. "rc" > "beta" > "alpha" # we have eliminated that one or both might be None above, # but mypy doesn't recognise this return self_tk > other_tk # type: ignore[operator] # We have eliminated that one or both aren't prereleases by the above return self.prerelease_revision > other.prerelease_revision # type: ignore[operator] # noqa: E501 # mypy wants to compare signature types with __le__, # but can't because of the decorator @_comparator def __ge__(self, other: Version) -> bool: # type: ignore[has-type] return self.__gt__(other) or self.__eq__(other) @_comparator def __lt__(self, other: Version) -> bool: return not (self.__gt__(other) or self.__eq__(other)) @_comparator def __le__(self, other: Version) -> bool: return not self.__gt__(other) def __sub__(self, other: Version) -> LevelBump: if not isinstance(other, Version): return NotImplemented if self.major != other.major: return LevelBump.MAJOR if self.minor != other.minor: return LevelBump.MINOR if self.patch != other.patch: return LevelBump.PATCH if self.is_prerelease ^ other.is_prerelease: return max( self.finalize_version() - other.finalize_version(), LevelBump.PRERELEASE_REVISION, ) if self.prerelease_revision != other.prerelease_revision: return LevelBump.PRERELEASE_REVISION return LevelBump.NO_RELEASE def to_prerelease( self, token: str | None = None, revision: int | None = None ) -> Version: return Version( self.major, self.minor, self.patch, prerelease_token=token or self.prerelease_token, prerelease_revision=(revision or self.prerelease_revision) or 1, tag_format=self.tag_format, ) def 
finalize_version(self) -> Version: return Version( self.major, self.minor, self.patch, prerelease_token=self.prerelease_token, tag_format=self.tag_format, ) python-semantic-release-10.4.1/tests/000077500000000000000000000000001506116242600175025ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/__init__.py000066400000000000000000000000001506116242600216010ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/conftest.py000066400000000000000000000460731506116242600217130ustar00rootroot00000000000000"""Note: fixtures are stored in the tests/fixtures directory for better organization""" from __future__ import annotations import json import os import sys from datetime import datetime, timedelta, timezone from hashlib import md5 from pathlib import Path from tempfile import NamedTemporaryFile from typing import TYPE_CHECKING, cast from unittest import mock import pytest from click.testing import CliRunner from filelock import FileLock from git import Commit, Repo from semantic_release.version.version import Version from tests.const import PROJ_DIR from tests.fixtures import * from tests.util import copy_dir_tree, remove_dir_tree if TYPE_CHECKING: from tempfile import _TemporaryFileWrapper from typing import Any, Callable, Generator, Optional, Protocol, Sequence, TypedDict from click.testing import Result from filelock import AcquireReturnProxy from git import Actor from tests.fixtures.git_repo import RepoActions class RunCliFn(Protocol): """ Run the CLI with the provided arguments and a clean environment. :param argv: The arguments to pass to the CLI. :type argv: list[str] | None :param env: The environment variables to set for the CLI. :type env: dict[str, str] | None :param invoke_kwargs: Additional arguments to pass to the invoke method. :type invoke_kwargs: dict[str, Any] | None :return: The result of the CLI invocation. 
:rtype: Result """ def __call__( self, argv: list[str] | None = None, env: dict[str, str] | None = None, invoke_kwargs: dict[str, Any] | None = None, ) -> Result: ... class MakeCommitObjFn(Protocol): def __call__(self, message: str) -> Commit: ... class NetrcFileFn(Protocol): def __call__(self, machine: str) -> _TemporaryFileWrapper[str]: ... class TeardownCachedDirFn(Protocol): def __call__(self, directory: Path) -> Path: ... class FormatDateStrFn(Protocol): def __call__(self, date: datetime) -> str: ... class GetStableDateNowFn(Protocol): def __call__(self) -> datetime: ... class GetMd5ForFileFn(Protocol): def __call__(self, file_path: Path | str) -> str: ... class GetMd5ForSetOfFilesFn(Protocol): """ Generates a hash for a set of files based on their contents This function will automatically filter out any 0-byte files or `__init__.py` files :param: files: A list of file paths to generate a hash for (MUST BE absolute paths) """ def __call__(self, files: Sequence[Path | str]) -> str: ... class GetAuthorizationToBuildRepoCacheFn(Protocol): def __call__(self, repo_name: str) -> AcquireReturnProxy | None: ... class BuildRepoOrCopyCacheFn(Protocol): def __call__( self, repo_name: str, build_spec_hash: str, build_repo_func: Callable[[Path], Sequence[RepoActions]], dest_dir: Path | None = None, ) -> Path: ... class RepoData(TypedDict): build_date: str build_spec_hash: str build_definition: Sequence[RepoActions] class GetCachedRepoDataFn(Protocol): def __call__(self, proj_dirname: str) -> RepoData | None: ... class SetCachedRepoDataFn(Protocol): def __call__(self, proj_dirname: str, data: RepoData) -> None: ... def pytest_addoption(parser: pytest.Parser, pluginmanager: pytest.PytestPluginManager): parser.addoption( "--comprehensive", help="Run full test suite including slow tests", default=False, action="store_true", ) def pytest_configure(config: pytest.Config): """ If no test selection modifications are provided, default to running only unit tests. 
def pytest_configure(config: pytest.Config):
    """
    Default the test run to unit tests only when the user supplied no test
    selection modifiers on the command line.

    See `pytest_collection_modifyitems` for more information on test
    selection modifications.
    """
    user_desired_comprehensive_evaluation = config.getoption("--comprehensive")
    user_provided_filter = str(config.getoption("-k"))
    user_provided_markers = str(config.getoption("-m"))

    # config.args equals [<root test dir>] when the user passed no explicit path
    root_test_dir = Path(__file__).parent.relative_to(config.rootpath)
    user_provided_test_path = config.args != [str(root_test_dir)]

    # If no options are provided, default to running only unit tests
    if not any(
        (
            user_desired_comprehensive_evaluation,
            user_provided_test_path,
            user_provided_filter,
            user_provided_markers,
        )
    ):
        config.option.markexpr = pytest.mark.unit.name


@pytest.hookimpl(trylast=True)
def pytest_collection_modifyitems(config: pytest.Config, items: list[pytest.Item]):
    """
    Test selection modifier based on markers and command line options.

    Examples
    --------
        pytest
            only unit tests that are not marked comprehensive are executed

        pytest --comprehensive
            all tests are executed

        pytest -m unit
            only unit tests that are not marked comprehensive are executed (same as no options)

        pytest -m e2e
            only end-to-end tests that are not marked comprehensive are executed

        pytest -m e2e --comprehensive
            all end-to-end tests are executed

        pytest -m "not unit"
            only tests that are not marked unit or comprehensive are executed

        pytest -m "not unit" --comprehensive
            all tests that are not marked unit are executed

        pytest -k "test_name"
            only tests that match the substring "test_name" (but not marked comprehensive) are executed

        pytest -k "test_name" --comprehensive
            all tests that match the substring "test_name" are executed
    """
    disable_comprehensive_tests = not config.getoption("--comprehensive")
    if not disable_comprehensive_tests:
        # --comprehensive was given; run everything, nothing to mark
        return

    comprehensive_test_skip_marker = pytest.mark.skip(
        reason="comprehensive tests are disabled by default"
    )
    user_provided_filter = str(config.getoption("-k"))

    for item in items:
        # An explicit -k substring match always runs, even if marked comprehensive
        if user_provided_filter and user_provided_filter in item.name:
            continue
        if "comprehensive" in item.keywords:
            item.add_marker(comprehensive_test_skip_marker)


@pytest.fixture
def cli_runner() -> CliRunner:
    """A fresh Click runner with stdout and stderr captured separately."""
    return CliRunner(mix_stderr=False)


@pytest.fixture(scope="session")
def run_cli(clean_os_environment: dict[str, str]) -> RunCliFn:
    """Session-scoped factory that invokes the PSR CLI under a clean environment."""

    def _run_cli(
        argv: list[str] | None = None,
        env: dict[str, str] | None = None,
        invoke_kwargs: dict[str, Any] | None = None,
    ) -> Result:
        # Deferred import so project configuration is settled before CLI load
        from semantic_release.cli.commands.main import main

        cli_runner = CliRunner(mix_stderr=False)
        env_vars = {**clean_os_environment, **(env or {})}
        args = ["-vv", *(argv or [])]

        with mock.patch.dict(os.environ, env_vars, clear=True):
            # run the CLI with the provided arguments
            return cli_runner.invoke(main, args=args, **(invoke_kwargs or {}))

    return _run_cli


@pytest.fixture(scope="session")
def default_netrc_username() -> str:
    return "username"


@pytest.fixture(scope="session")
def default_netrc_password() -> str:
    return "password"


@pytest.fixture(scope="session")
def netrc_file(
    default_netrc_username: str,
    default_netrc_password: str,
) -> Generator[NetrcFileFn, None, None]:
    """
    Factory fixture producing temporary .netrc files for a given machine.

    All files created through the factory are deleted at session teardown.
    """
    temporary_files: list[str] = []

    def _netrc_file(machine: str) -> _TemporaryFileWrapper[str]:
        # delete=False so the file survives the context for the caller to read
        ctx_mgr = NamedTemporaryFile("w", delete=False)
        with ctx_mgr as netrc_fd:
            temporary_files.append(ctx_mgr.name)
            netrc_fd.write(f"machine {machine}{os.linesep}")
            netrc_fd.write(f"login {default_netrc_username}{os.linesep}")
            netrc_fd.write(f"password {default_netrc_password}{os.linesep}")
            netrc_fd.flush()
        return ctx_mgr

    try:
        yield _netrc_file
    finally:
        for temp_file in temporary_files:
            os.unlink(temp_file)


@pytest.fixture(scope="session")
def stable_today_date() -> datetime:
    """
    A session-stable "today" timestamp.

    If the run is estimated (generously, +1 hour) to cross midnight, the
    post-midnight timestamp is used so date-based assertions stay stable.
    """
    curr_time = datetime.now(timezone.utc).astimezone()
    est_test_completion = curr_time + timedelta(hours=1)  # exaggeration
    starting_day_of_year = curr_time.timetuple().tm_yday
    ending_day_of_year = est_test_completion.timetuple().tm_yday

    # NOTE(review): the day-of-year comparison does not handle the
    # Dec 31 -> Jan 1 rollover (tm_yday wraps back to 1), so the adjustment
    # is skipped on New Year's Eve -- confirm this is acceptable.
    if starting_day_of_year < ending_day_of_year:
        return est_test_completion

    return curr_time
@pytest.fixture(scope="session")
def stable_now_date(stable_today_date: datetime) -> GetStableDateNowFn:
    """Returns a callable producing "now" pinned to the stable session date."""

    def _stable_now_date() -> datetime:
        curr_time = datetime.now(timezone.utc).astimezone()
        # keep the stable date but use the current sub-hour clock values
        return stable_today_date.replace(
            minute=curr_time.minute,
            second=curr_time.second,
            microsecond=curr_time.microsecond,
        )

    return _stable_now_date


@pytest.fixture(scope="session")
def format_date_str() -> FormatDateStrFn:
    """Formats a date as how it would appear in the changelog (Must match local timezone)"""

    def _format_date_str(date: datetime) -> str:
        return date.strftime("%Y-%m-%d")

    return _format_date_str


@pytest.fixture(scope="session")
def today_date_str(
    stable_today_date: datetime, format_date_str: FormatDateStrFn
) -> str:
    """Today's Date formatted as how it would appear in the changelog (matches local timezone)"""
    return format_date_str(stable_today_date)


@pytest.fixture(scope="session")
def cached_files_dir(request: pytest.FixtureRequest) -> Path:
    # pytest-managed cache directory that persists between sessions
    return request.config.cache.mkdir("psr-cached-repos")


@pytest.fixture(scope="session")
def get_authorization_to_build_repo_cache(
    tmp_path_factory: pytest.TempPathFactory, worker_id: str
) -> GetAuthorizationToBuildRepoCacheFn:
    """Inter-worker lock so only one xdist worker builds a given cached repo."""

    def _get_authorization_to_build_repo_cache(
        repo_name: str,
    ) -> AcquireReturnProxy | None:
        if worker_id == "master":
            # not executing with multiple workers via xdist, so just continue
            return None

        # get the temp directory shared by all workers
        root_tmp_dir = tmp_path_factory.getbasetemp().parent

        return FileLock(root_tmp_dir / f"{repo_name}.lock").acquire(
            timeout=30, blocking=True
        )

    return _get_authorization_to_build_repo_cache


@pytest.fixture(scope="session")
def get_cached_repo_data(request: pytest.FixtureRequest) -> GetCachedRepoDataFn:
    """Reads previously stored repo build metadata from the pytest cache."""

    def _get_cached_repo_data(proj_dirname: str) -> RepoData | None:
        cache_key = f"psr/repos/{proj_dirname}"
        return cast("Optional[RepoData]", request.config.cache.get(cache_key, None))

    return _get_cached_repo_data


@pytest.fixture(scope="session")
def set_cached_repo_data(request: pytest.FixtureRequest) -> SetCachedRepoDataFn:
    """Stores repo build metadata into the pytest cache (JSON-serialized)."""

    def magic_serializer(obj: Any) -> Any:
        # Path and Version objects are not JSON-serializable by default
        if isinstance(obj, Path):
            return obj.__fspath__()
        if isinstance(obj, Version):
            return obj.__dict__
        return obj

    def _set_cached_repo_data(proj_dirname: str, data: RepoData) -> None:
        cache_key = f"psr/repos/{proj_dirname}"
        # round-trip through json to normalize nested objects before caching
        request.config.cache.set(
            cache_key,
            json.loads(json.dumps(data, default=magic_serializer)),
        )

    return _set_cached_repo_data


@pytest.fixture(scope="session")
def build_repo_or_copy_cache(
    cached_files_dir: Path,
    today_date_str: str,
    stable_now_date: GetStableDateNowFn,
    get_cached_repo_data: GetCachedRepoDataFn,
    set_cached_repo_data: SetCachedRepoDataFn,
    get_authorization_to_build_repo_cache: GetAuthorizationToBuildRepoCacheFn,
) -> BuildRepoOrCopyCacheFn:
    """
    Returns a callable that builds a test repository into the session cache
    (rebuilding only when the build-spec hash changed) and optionally copies
    the cached result into a destination directory.
    """
    log_file = cached_files_dir.joinpath("repo-build.log")
    log_file_lock = FileLock(log_file.with_suffix(f"{log_file.suffix}.lock"), timeout=2)

    def _build_repo_w_cache_checking(
        repo_name: str,
        build_spec_hash: str,
        build_repo_func: Callable[[Path], Sequence[RepoActions]],
        dest_dir: Path | None = None,
    ) -> Path:
        # Blocking mechanism to synchronize xdist workers
        # Runs before the cache is checked because the cache will be set once the build is complete
        filelock = get_authorization_to_build_repo_cache(repo_name)

        cached_repo_data = get_cached_repo_data(repo_name)
        cached_repo_path = cached_files_dir.joinpath(repo_name)

        # Determine if the build spec has changed since the last cached build
        unmodified_build_spec = bool(
            cached_repo_data and cached_repo_data["build_spec_hash"] == build_spec_hash
        )

        if not unmodified_build_spec or not cached_repo_path.exists():
            # Cache miss, so build the repo (make sure its clean first)
            remove_dir_tree(cached_repo_path, force=True)
            cached_repo_path.mkdir(parents=True, exist_ok=True)

            build_msg = f"Building cached project files for {repo_name}"
            with log_file_lock, log_file.open(mode="a") as afd:
                afd.write(f"{stable_now_date().isoformat()}: {build_msg}...\n")

            try:
                # Try to build repository but catch any errors so that it doesn't cascade through all tests
                # due to an unreleased lock
                build_definition = build_repo_func(cached_repo_path)
            except Exception:
                # discard the partial build and release the lock before re-raising
                remove_dir_tree(cached_repo_path, force=True)
                if filelock:
                    filelock.lock.release()
                with log_file_lock, log_file.open(mode="a") as afd:
                    afd.write(
                        f"{stable_now_date().isoformat()}: {build_msg}...FAILED\n"
                    )
                raise

            # Marks the date when the cached repo was created
            set_cached_repo_data(
                repo_name,
                {
                    "build_date": today_date_str,
                    "build_spec_hash": build_spec_hash,
                    "build_definition": build_definition,
                },
            )

            with log_file_lock, log_file.open(mode="a") as afd:
                afd.write(f"{stable_now_date().isoformat()}: {build_msg}...DONE\n")

        if filelock:
            filelock.lock.release()

        if dest_dir:
            copy_dir_tree(cached_repo_path, dest_dir)
            return dest_dir

        return cached_repo_path

    return _build_repo_w_cache_checking


@pytest.fixture(scope="session")
def teardown_cached_dir() -> Generator[TeardownCachedDirFn, None, None]:
    """Registers directories to be removed at the end of the test session."""
    directories: list[Path] = []

    def _teardown_cached_dir(directory: Path | str) -> Path:
        directories.append(Path(directory))
        return directories[-1]

    try:
        yield _teardown_cached_dir
    finally:
        # clean up any registered cached directories
        for directory in directories:
            if directory.exists():
                remove_dir_tree(directory, force=True)


@pytest.fixture(scope="session")
def make_commit_obj(
    commit_author: Actor, stable_now_date: GetStableDateNowFn
) -> MakeCommitObjFn:
    """Factory building detached git Commit objects with stable timestamps."""

    def _make_commit(message: str) -> Commit:
        commit_timestamp = round(stable_now_date().timestamp())
        return Commit(
            repo=Repo(),
            binsha=Commit.NULL_BIN_SHA,
            message=message,
            author=commit_author,
            authored_date=commit_timestamp,
            committer=commit_author,
            committed_date=commit_timestamp,
            parents=[],
        )

    return _make_commit


@pytest.fixture(scope="session")
def get_md5_for_file() -> GetMd5ForFileFn:
    """Returns a memoizing callable computing the md5 digest of a file's bytes."""
    in_memory_cache = {}

    def _get_md5_for_file(file_path: Path | str) -> str:
        file_path = Path(file_path)
        # keyed on the project-relative path so the cache is location-stable
        rel_file_path = str(file_path.relative_to(PROJ_DIR))
        if rel_file_path not in in_memory_cache:
            in_memory_cache[rel_file_path] = md5(  # noqa: S324, not using hash for security
                file_path.read_bytes()
            ).hexdigest()

        return in_memory_cache[rel_file_path]

    return _get_md5_for_file


@pytest.fixture(scope="session")
def get_md5_for_set_of_files(
    get_md5_for_file: GetMd5ForFileFn,
) -> GetMd5ForSetOfFilesFn:
    """Returns a memoizing callable computing a combined md5 over a set of files."""
    in_memory_cache = {}

    def _get_md5_for_set_of_files(files: Sequence[Path | str]) -> str:
        # cast to a filtered and unique set of Path objects
        # (0-byte files and __init__.py files are excluded)
        file_dependencies = sorted(
            set(
                filter(
                    lambda file_path: file_path.name != "__init__.py"
                    and file_path.stat().st_size > 0,
                    (Path(f).absolute().resolve() for f in files),
                )
            )
        )

        # create a hashable key of all dependencies to store the combined files hash
        cache_key = tuple(
            [str(file.relative_to(PROJ_DIR)) for file in file_dependencies]
        )

        # check if we have done this before
        if cache_key not in in_memory_cache:
            # since we haven't done this before, generate the hash for each file
            file_hashes = [get_md5_for_file(file) for file in file_dependencies]

            # combine the hashes into a string and then hash the result and store it
            in_memory_cache[cache_key] = md5(  # noqa: S324, not using hash for security
                str.join("\n", file_hashes).encode()
            ).hexdigest()

        # return the stored calculated hash for the set
        return in_memory_cache[cache_key]

    return _get_md5_for_set_of_files


@pytest.fixture(scope="session")
def clean_os_environment() -> dict[str, str]:
    """A minimal environment mapping (PATH/HOME plus Windows requisites) for CLI runs."""
    return dict(
        filter(
            lambda k_v: k_v[1] is not None,  # type: ignore[arg-type]
            {
                "PATH": os.getenv("PATH"),
                "HOME": os.getenv("HOME"),
                **(
                    {}
                    if sys.platform != "win32"
                    else {
                        # Windows Required variables
                        "ALLUSERSAPPDATA": os.getenv("ALLUSERSAPPDATA"),
                        "ALLUSERSPROFILE": os.getenv("ALLUSERSPROFILE"),
                        "APPDATA": os.getenv("APPDATA"),
                        "COMMONPROGRAMFILES": os.getenv("COMMONPROGRAMFILES"),
                        "COMMONPROGRAMFILES(X86)": os.getenv("COMMONPROGRAMFILES(X86)"),
                        "DEFAULTUSERPROFILE": os.getenv("DEFAULTUSERPROFILE"),
                        "HOMEPATH": os.getenv("HOMEPATH"),
                        "PATHEXT": os.getenv("PATHEXT"),
                        "PROFILESFOLDER": os.getenv("PROFILESFOLDER"),
                        "PROGRAMFILES": os.getenv("PROGRAMFILES"),
                        "PROGRAMFILES(X86)": os.getenv("PROGRAMFILES(X86)"),
                        "SYSTEM": os.getenv("SYSTEM"),
                        "SYSTEM16": os.getenv("SYSTEM16"),
                        "SYSTEM32": os.getenv("SYSTEM32"),
                        "SYSTEMDRIVE": os.getenv("SYSTEMDRIVE"),
                        "SYSTEMROOT": os.getenv("SYSTEMROOT"),
                        "TEMP": os.getenv("TEMP"),
                        "TMP": os.getenv("TMP"),
                        "USERPROFILE": os.getenv("USERPROFILE"),
                        "USERSID": os.getenv("USERSID"),
                        "USERNAME": os.getenv("USERNAME"),
                        "WINDIR": os.getenv("WINDIR"),
                    }
                ),
            }.items(),
        )
    )
from enum import Enum
from pathlib import Path

import git

import semantic_release
from semantic_release.cli.commands.main import Cli

# Absolute path to the project root (two levels above tests/const.py)
PROJ_DIR = Path(__file__).parent.parent.absolute().resolve()


class RepoActionStep(str, Enum):
    """Identifiers for the declarative steps used to build test repositories."""

    CONFIGURE = "CONFIGURE"
    CONFIGURE_MONOREPO = "CONFIGURE_MONOREPO"
    CREATE_MONOREPO = "CREATE_MONOREPO"
    CHANGE_DIRECTORY = "CHANGE_DIRECTORY"
    WRITE_CHANGELOGS = "WRITE_CHANGELOGS"
    GIT_CHECKOUT = "GIT_CHECKOUT"
    GIT_COMMIT = "GIT_COMMIT"
    GIT_MERGE = "GIT_MERGE"
    GIT_SQUASH = "GIT_SQUASH"
    GIT_TAG = "GIT_TAG"
    RELEASE = "RELEASE"
    MAKE_COMMITS = "MAKE_COMMITS"


# Representative version strings used across version-parsing tests
A_FULL_VERSION_STRING = "1.11.567"
A_PRERELEASE_VERSION_STRING = "2.3.4-dev.23"
A_FULL_VERSION_STRING_WITH_BUILD_METADATA = "4.2.3+build.12345"

# Stand-in remote repository identity for HVCS-related tests
EXAMPLE_REPO_OWNER = "example_owner"
EXAMPLE_REPO_NAME = "example_repo"
EXAMPLE_HVCS_DOMAIN = "example.com"

DEFAULT_BRANCH_NAME = "main"
INITIAL_COMMIT_MESSAGE = "Initial commit"

# CLI program/subcommand names derived from the package itself
MAIN_PROG_NAME = str(semantic_release.__name__).replace("_", "-")
SUCCESS_EXIT_CODE = 0

CHANGELOG_SUBCMD = Cli.SubCmds.CHANGELOG.name.lower()
GENERATE_CONFIG_SUBCMD = Cli.SubCmds.GENERATE_CONFIG.name.lower()
PUBLISH_SUBCMD = Cli.SubCmds.PUBLISH.name.lower()
VERSION_SUBCMD = Cli.SubCmds.VERSION.name.lower()

NULL_HEX_SHA = git.Object.NULL_HEX_SHA

# Default release commit message template used by PSR
COMMIT_MESSAGE = "{version}\n\nAutomatically generated by python-semantic-release\n"

SUPPORTED_ISSUE_CLOSURE_PREFIXES = [
    "Close",
    "Closes",
    "Closed",
    "Closing",
    "Fix",
    "Fixes",
    "Fixed",
    "Fixing",
    "Resolve",
    "Resolves",
    "Resolved",
    "Resolving",
    "Implement",
    "Implements",
    "Implemented",
    "Implementing",
]

CONVENTIONAL_COMMITS_CHORE = ("ci: added a commit lint job\n",)
# Different in-scope commits that produce a certain release type
CONVENTIONAL_COMMITS_PATCH = (
    *CONVENTIONAL_COMMITS_CHORE,
    "fix: fixed voltage in the flux capacitor\n",
)
CONVENTIONAL_COMMITS_MINOR = (
    *CONVENTIONAL_COMMITS_PATCH,
    "feat: last minute rush order\n",
)
# Take previous commits and insert a breaking change
CONVENTIONAL_COMMITS_MAJOR = (
    *CONVENTIONAL_COMMITS_MINOR,
    "fix!: big change\n\nBREAKING CHANGE: reworked something for previous feature\n",
)

EMOJI_COMMITS_CHORE = (
    ":broom: updated lint & code style\n",
    ":none: updated ci pipeline\n",
)
EMOJI_COMMITS_PATCH = (
    *EMOJI_COMMITS_CHORE,
    ":bug: fixed voltage in the flux capacitor\n",
)
EMOJI_COMMITS_MINOR = (
    *EMOJI_COMMITS_PATCH,
    ":sparkles::pencil: docs for something special\n",
    # Emoji in description should not be used to evaluate change type
    ":sparkles: last minute rush order\n\nGood thing we're 10x developers :boom:\n",
)
EMOJI_COMMITS_MAJOR = (
    *EMOJI_COMMITS_MINOR,
    ":boom: Move to the blockchain\n",
)

# Note - the scipy commit fixtures for commits that should evaluate to the various scopes
# are in tests/fixtures/scipy

EXAMPLE_PROJECT_NAME = "example"
EXAMPLE_PROJECT_VERSION = "0.0.0"
EXAMPLE_PROJECT_LICENSE = "MIT"

# Uses the internal defaults of semantic-release unless otherwise needed for testing
# modify the pyproject toml as necessary for the test using update_pyproject_toml()
# and derivative fixtures
readme = "README.md" classifiers = [ "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python :: 3 :: Only" ] [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" [tool.semantic_release] version_variables = [ "src/{EXAMPLE_PROJECT_NAME}/_version.py:__version__", ] version_toml = ["pyproject.toml:tool.poetry.version"] """.lstrip() EXAMPLE_SETUP_CFG_CONTENT = rf""" [metadata] name = example version = {EXAMPLE_PROJECT_VERSION} description = Just an example really long_description = file: README.md long_description_content_type = text/markdown author = semantic-release author_email = not-a.real@email.com url = https://github.com/python-semantic-release/python-semantic-release python_requires = >=3.7 [options] zip_safe = True include_package_data = True packages = find: install_requires = PyYAML==6.0 pydantic==1.9.0 [options.extras_require] dev = tox twine==3.1.1 test = pytest pytest-cov pytest-mock pytest-aiohttp lint = flake8 black>=22.6.0 isort>=5.10.1 [options.packages.find] exclude = test* [bdist_wheel] universal = 1 [coverage:run] omit = */tests/* [tools:pytest] python_files = tests/test_*.py tests/**/test_*.py [isort] skip = .tox,venv default_section = THIRDPARTY known_first_party = {EXAMPLE_PROJECT_NAME},tests multi_line_output=3 include_trailing_comma=True force_grid_wrap=0 use_parentheses=True line_length=88 [flake8] max-line-length = 88 """.lstrip() EXAMPLE_SETUP_PY_CONTENT = rf""" import re import sys from setuptools import find_packages, setup def _read_long_description(): try: with open("readme.rst") as fd: return fd.read() except Exception: return None with open("{EXAMPLE_PROJECT_NAME}/_version.py", "r") as fd: version = re.search( r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE ).group(1) try: from semantic_release import setup_hook setup_hook(sys.argv) except ImportError: pass setup( name="{EXAMPLE_PROJECT_NAME}", version="{EXAMPLE_PROJECT_VERSION}", 
url="http://github.com/python-semantic-release/python-semantic-release", author="semantic-release", author_email="not-a.real@email.com", description="Just an example", long_description=_read_long_description(), packages=find_packages(exclude=("tests",)), license="MIT", install_requires=[ "click>=7,<9", "click_log>=0.3,<1", "gitpython>=3.0.8,<4", "invoke>=1.4.1,<2", "semver>=2.10,<3", "twine>=3,<4", "requests>=2.25,<3", "wheel", "python-gitlab>=2,<4", # tomlkit used to be pinned to 0.7.0 # See https://github.com/python-semantic-release/python-semantic-release/issues/336 # and https://github.com/python-semantic-release/python-semantic-release/pull/337 # and https://github.com/python-semantic-release/python-semantic-release/issues/491 "tomlkit~=0.10", "dotty-dict>=1.3.0,<2", "dataclasses==0.8; python_version < '3.7.0'", "packaging", ], extras_require={{ "test": [ "coverage>=5,<6", "pytest>=5,<6", "pytest-xdist>=1,<2", "pytest-mock>=2,<3", "pytest-lazy-fixture~=0.6.3", "responses==0.13.3", "mock==1.3.0", ], "docs": ["Sphinx==1.3.6", "Jinja2==3.0.3"], "dev": ["tox", "isort", "black"], "mypy": ["mypy", "types-requests"], }}, include_package_data=True, classifiers=[ "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", ], ) """.lstrip() EXAMPLE_CHANGELOG_MD_CONTENT = r""" # CHANGELOG ## v1.0.0 * Various bugfixes, security enhancements * Extra cookies to enhance your experience * ~Removed~ simplified cookie opt-out handling logic """.lstrip() EXAMPLE_CHANGELOG_RST_CONTENT = r""" .. _changelog: ========= CHANGELOG ========= .. example project base changelog .. 
_changelog-v1.0.0: v1.0.0 (1970-01-01) =================== * Various bugfixes, security enhancements * Extra cookies to enhance your experience * ~Removed~ simplified cookie opt-out handling logic """.lstrip() EXAMPLE_RELEASE_NOTES_TEMPLATE = r""" ## What's Changed {% for type_, commits in release["elements"] | dictsort %}{{ "### %s" | format(type_ | title) }}{% if type_ != "unknown" %}{% for commit in commits %}{{ "* %s" | format(commit.descriptions[0] | trim) }}{% endfor %}{% endif %}{% endfor %} """.lstrip() # noqa: E501 RELEASE_NOTES = "# Release Notes" DEFAULT_MERGE_STRATEGY_OPTION = "theirs" python-semantic-release-10.4.1/tests/e2e/000077500000000000000000000000001506116242600201555ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/__init__.py000066400000000000000000000000001506116242600222540ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_changelog/000077500000000000000000000000001506116242600227275ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_changelog/__init__.py000066400000000000000000000000001506116242600250260ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_changelog/test_changelog.py000066400000000000000000001241401506116242600262710ustar00rootroot00000000000000from __future__ import annotations import os from textwrap import dedent from typing import TYPE_CHECKING from unittest import mock import pytest import requests_mock from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from requests import Session import semantic_release.hvcs.github from semantic_release.changelog.context import ChangelogMode from semantic_release.cli.config import ChangelogOutputFormat from semantic_release.hvcs.github import Github from tests.const import ( CHANGELOG_SUBCMD, EXAMPLE_HVCS_DOMAIN, EXAMPLE_RELEASE_NOTES_TEMPLATE, EXAMPLE_REPO_NAME, EXAMPLE_REPO_OWNER, MAIN_PROG_NAME, ) from tests.fixtures.example_project import ( change_to_ex_proj_dir, changelog_md_file, 
changelog_rst_file, default_md_changelog_insertion_flag, default_rst_changelog_insertion_flag, example_changelog_md, example_changelog_rst, ) from tests.fixtures.repos import ( repo_w_git_flow_conventional_commits, repo_w_git_flow_emoji_commits, repo_w_git_flow_scipy_commits, repo_w_git_flow_w_alpha_prereleases_n_conventional_commits, repo_w_git_flow_w_alpha_prereleases_n_emoji_commits, repo_w_git_flow_w_alpha_prereleases_n_scipy_commits, repo_w_git_flow_w_beta_alpha_rev_prereleases_n_conventional_commits, repo_w_git_flow_w_beta_alpha_rev_prereleases_n_emoji_commits, repo_w_git_flow_w_beta_alpha_rev_prereleases_n_scipy_commits, repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits, repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format, repo_w_git_flow_w_rc_n_alpha_prereleases_n_emoji_commits, repo_w_git_flow_w_rc_n_alpha_prereleases_n_scipy_commits, repo_w_github_flow_w_default_release_channel_conventional_commits, repo_w_github_flow_w_default_release_channel_emoji_commits, repo_w_github_flow_w_default_release_channel_scipy_commits, repo_w_github_flow_w_feature_release_channel_conventional_commits, repo_w_github_flow_w_feature_release_channel_emoji_commits, repo_w_github_flow_w_feature_release_channel_scipy_commits, repo_w_no_tags_conventional_commits, repo_w_no_tags_emoji_commits, repo_w_no_tags_scipy_commits, repo_w_trunk_only_conventional_commits, repo_w_trunk_only_emoji_commits, repo_w_trunk_only_n_prereleases_conventional_commits, repo_w_trunk_only_n_prereleases_emoji_commits, repo_w_trunk_only_n_prereleases_scipy_commits, repo_w_trunk_only_scipy_commits, ) from tests.util import ( add_text_to_file, assert_exit_code, assert_successful_exit_code, get_func_qual_name, get_release_history_from_context, ) if TYPE_CHECKING: from pathlib import Path from typing import TypedDict from requests_mock import Mocker from semantic_release.commit_parser.conventional.parser import ( ConventionalCommitParser, ) from 
class Commit2Section(TypedDict):
    # maps each commit convention to the commit + changelog section it lands in
    conventional: Commit2SectionCommit
    emoji: Commit2SectionCommit
    scipy: Commit2SectionCommit


class Commit2SectionCommit(TypedDict):
    # a commit definition paired with the changelog section title it produces
    commit: CommitDef
    section: str


@pytest.mark.parametrize("arg0", [None, "--post-to-release-tag"])
@pytest.mark.parametrize(
    "repo_result",
    [
        lazy_fixture(repo_fixture)
        for repo_fixture in (
            # Only need to test when it has tags or no tags
            # DO NOT need to consider all repo types as it doesn't change no-op behavior
            repo_w_no_tags_conventional_commits.__name__,
            repo_w_trunk_only_conventional_commits.__name__,
        )
    ],
)
def test_changelog_noop_is_noop(
    repo_result: BuiltRepoResult,
    arg0: str | None,
    run_cli: RunCliFn,
    get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn,
):
    """--noop must leave the working tree untouched and make no HTTP calls."""
    repo = repo_result["repo"]
    repo_def = repo_result["definition"]

    released_versions = get_versions_from_repo_build_def(repo_def)
    version_str = released_versions[-1] if len(released_versions) > 0 else None

    # start from a pristine working tree
    repo.git.reset("--hard")

    # Set up a requests HTTP session so we can catch the HTTP calls and ensure
    # they're made
    session = Session()
    session.hooks = {"response": [lambda r, *_, **__: r.raise_for_status()]}

    mock_adapter = requests_mock.Adapter()
    mock_adapter.register_uri(
        method=requests_mock.ANY, url=requests_mock.ANY, json={"id": 10001}
    )
    session.mount("http://", mock_adapter)
    session.mount("https://", mock_adapter)

    # patch PSR's session builder so any outbound call hits the mock adapter
    with mock.patch(
        get_func_qual_name(semantic_release.hvcs.github.build_requests_session),
        return_value=session,
    ), requests_mock.Mocker(session=session) as mocker:
        # only post to the release tag when a released version exists
        args = [arg0, f"v{version_str}"] if version_str and arg0 else []
        cli_cmd = [MAIN_PROG_NAME, "--noop", CHANGELOG_SUBCMD, *args]

        result = run_cli(cli_cmd[1:])

    # Evaluate
    assert_successful_exit_code(result, cli_cmd)
    # no working tree changes were made
    assert not repo.git.status(short=True)

    if args:
        # no HTTP traffic occurred in noop mode
        assert not mocker.called
        assert not mock_adapter.called


@pytest.mark.parametrize(
    "changelog_file, insertion_flag",
    [
        (
            # ChangelogOutputFormat.MARKDOWN
            lazy_fixture(example_changelog_md.__name__),
            lazy_fixture(default_md_changelog_insertion_flag.__name__),
        ),
        (
            # ChangelogOutputFormat.RESTRUCTURED_TEXT
            lazy_fixture(example_changelog_rst.__name__),
            lazy_fixture(default_rst_changelog_insertion_flag.__name__),
        ),
    ],
)
@pytest.mark.parametrize(
    "repo_result",
    [
        *[
            lazy_fixture(repo_fixture)
            for repo_fixture in [
                # All commit types and one without a release
                repo_w_no_tags_conventional_commits.__name__,
                repo_w_trunk_only_conventional_commits.__name__,
                repo_w_trunk_only_emoji_commits.__name__,
                repo_w_trunk_only_scipy_commits.__name__,
            ]
        ],
        *[
            pytest.param(lazy_fixture(repo_fixture), marks=pytest.mark.comprehensive)
            for repo_fixture in [
                # repo_w_no_tags_conventional_commits.__name__,
                repo_w_no_tags_emoji_commits.__name__,
                repo_w_no_tags_scipy_commits.__name__,
                # repo_w_trunk_only_conventional_commits.__name__,
                # repo_w_trunk_only_emoji_commits.__name__,
                # repo_w_trunk_only_scipy_commits.__name__,
                repo_w_trunk_only_n_prereleases_conventional_commits.__name__,
                repo_w_trunk_only_n_prereleases_emoji_commits.__name__,
                repo_w_trunk_only_n_prereleases_scipy_commits.__name__,
                repo_w_github_flow_w_default_release_channel_conventional_commits.__name__,
                repo_w_github_flow_w_default_release_channel_emoji_commits.__name__,
                repo_w_github_flow_w_default_release_channel_scipy_commits.__name__,
                repo_w_github_flow_w_feature_release_channel_conventional_commits.__name__,
                repo_w_github_flow_w_feature_release_channel_emoji_commits.__name__,
                repo_w_github_flow_w_feature_release_channel_scipy_commits.__name__,
                repo_w_git_flow_conventional_commits.__name__,
                repo_w_git_flow_emoji_commits.__name__,
                repo_w_git_flow_scipy_commits.__name__,
                repo_w_git_flow_w_beta_alpha_rev_prereleases_n_conventional_commits.__name__,
                repo_w_git_flow_w_beta_alpha_rev_prereleases_n_emoji_commits.__name__,
                repo_w_git_flow_w_beta_alpha_rev_prereleases_n_scipy_commits.__name__,
                repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__,
                repo_w_git_flow_w_alpha_prereleases_n_emoji_commits.__name__,
                repo_w_git_flow_w_alpha_prereleases_n_scipy_commits.__name__,
                repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits.__name__,
                repo_w_git_flow_w_rc_n_alpha_prereleases_n_emoji_commits.__name__,
                repo_w_git_flow_w_rc_n_alpha_prereleases_n_scipy_commits.__name__,
                repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format.__name__,
            ]
        ],
    ],
)
def test_changelog_content_regenerated(
    repo_result: BuiltRepoResult,
    run_cli: RunCliFn,
    update_pyproject_toml: UpdatePyprojectTomlFn,
    changelog_file: Path,
    insertion_flag: str,
):
    """In INIT mode, a deleted changelog is regenerated with identical content."""
    # Set the project configurations
    update_pyproject_toml(
        "tool.semantic_release.changelog.mode", ChangelogMode.INIT.value
    )
    update_pyproject_toml(
        "tool.semantic_release.changelog.default_templates.changelog_file",
        str(changelog_file.name),
    )

    # Because we are in init mode, the insertion flag is not present in the changelog
    # we must take it out manually because our repo generation fixture includes it automatically
    with changelog_file.open(newline=os.linesep) as rfd:
        # use os.linesep here because the insertion flag is os-specific
        # but convert the content to universal newlines for comparison
        expected_changelog_content = (
            rfd.read().replace(f"{insertion_flag}{os.linesep}", "").replace("\r", "")
        )

    # Remove the changelog and then check that we can regenerate it
    os.remove(str(changelog_file.resolve()))

    # Act
    cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD]
    result = run_cli(cli_cmd[1:])

    # Evaluate
    assert_successful_exit_code(result, cli_cmd)

    # Check that the changelog file was re-created
    assert changelog_file.exists()

    actual_content = changelog_file.read_text()

    # Check that the changelog content is the same as before
    assert expected_changelog_content == actual_content


@pytest.mark.parametrize(
    "changelog_file, insertion_flag",
    [
        (
            # ChangelogOutputFormat.MARKDOWN
            lazy_fixture(example_changelog_md.__name__),
            lazy_fixture(default_md_changelog_insertion_flag.__name__),
        ),
        (
            # ChangelogOutputFormat.RESTRUCTURED_TEXT
            lazy_fixture(example_changelog_rst.__name__),
            lazy_fixture(default_rst_changelog_insertion_flag.__name__),
        ),
    ],
)
@pytest.mark.usefixtures(change_to_ex_proj_dir.__name__)
def test_changelog_content_regenerated_masked_initial_release(
    build_repo_from_definition: BuildRepoFromDefinitionFn,
    get_repo_definition_4_trunk_only_repo_w_tags: GetRepoDefinitionFn,
    example_project_dir: ExProjectDir,
    run_cli: RunCliFn,
    changelog_file: Path,
    insertion_flag: str,
):
    """Same regeneration check as above, but with the initial release masked."""
    build_definition = get_repo_definition_4_trunk_only_repo_w_tags(
        commit_type="conventional",
        mask_initial_release=True,
        extra_configs={
            "tool.semantic_release.changelog.default_templates.changelog_file": str(
                changelog_file.name
            ),
            "tool.semantic_release.changelog.mode": ChangelogMode.INIT.value,
        },
    )
    build_repo_from_definition(example_project_dir, build_definition)

    # Because we are in init mode, the insertion flag is not present in the changelog
    # we must take it out manually because our repo generation fixture includes it automatically
    with changelog_file.open(newline=os.linesep) as rfd:
        # use os.linesep here because the insertion flag is os-specific
        # but convert the content to universal newlines for comparison
        expected_changelog_content = (
            rfd.read().replace(f"{insertion_flag}{os.linesep}", "").replace("\r", "")
        )

    # Remove the changelog and then check that we can regenerate it
    os.remove(str(changelog_file.resolve()))

    # Act
    cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD]
    result = run_cli(cli_cmd[1:])

    # Evaluate
    assert_successful_exit_code(result, cli_cmd)

    # Check that the changelog file was re-created
    assert changelog_file.exists()

    actual_content = changelog_file.read_text()

    # Check that the changelog content is the same as before
    assert expected_changelog_content == actual_content
@pytest.mark.parametrize(
    "changelog_file",
    [
        lazy_fixture(example_changelog_md.__name__),
        lazy_fixture(example_changelog_rst.__name__),
    ],
)
@pytest.mark.parametrize(
    "repo_result",
    [
        lazy_fixture(repo_fixture)
        for repo_fixture in [
            repo_w_trunk_only_conventional_commits.__name__,
            repo_w_trunk_only_emoji_commits.__name__,
            repo_w_trunk_only_scipy_commits.__name__,
        ]
    ],
)
def test_changelog_update_mode_unchanged(
    repo_result: BuiltRepoResult,
    run_cli: RunCliFn,
    update_pyproject_toml: UpdatePyprojectTomlFn,
    changelog_file: Path,
):
    """
    Given that the changelog file already exists for the current release,
    When the changelog command is run in "update" mode,
    Then the changelog file is not modified.
    """
    # Set the project configurations
    update_pyproject_toml(
        "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value
    )
    update_pyproject_toml(
        "tool.semantic_release.changelog.default_templates.changelog_file",
        str(changelog_file.name),
    )

    # Capture the expected changelog content
    expected_changelog_content = changelog_file.read_text()

    # Act
    cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD]
    result = run_cli(cli_cmd[1:])

    # Evaluate
    assert_successful_exit_code(result, cli_cmd)

    # Check that the changelog file was re-created
    assert changelog_file.exists()

    actual_content = changelog_file.read_text()

    # Check that the changelog content is the same as before
    assert expected_changelog_content == actual_content


@pytest.mark.parametrize(
    "changelog_file",
    [
        lazy_fixture(example_changelog_md.__name__),
        lazy_fixture(example_changelog_rst.__name__),
    ],
)
@pytest.mark.parametrize(
    "repo_result",
    [
        lazy_fixture(repo_fixture)
        for repo_fixture in [
            repo_w_no_tags_conventional_commits.__name__,
            repo_w_no_tags_emoji_commits.__name__,
            repo_w_no_tags_scipy_commits.__name__,
            repo_w_trunk_only_conventional_commits.__name__,
            repo_w_trunk_only_emoji_commits.__name__,
            repo_w_trunk_only_scipy_commits.__name__,
        ]
    ],
)
def test_changelog_update_mode_no_prev_changelog(
    repo_result: BuiltRepoResult,
    run_cli: RunCliFn,
    update_pyproject_toml: UpdatePyprojectTomlFn,
    changelog_file: Path,
):
    """
    Given that the changelog file does not exist,
    When the changelog command is run in "update" mode,
    Then the changelog file is initialized with the default content.
    """
    # Set the project configurations
    update_pyproject_toml(
        "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value
    )
    update_pyproject_toml(
        "tool.semantic_release.changelog.default_templates.changelog_file",
        str(changelog_file.name),
    )

    # Capture the expected changelog content
    expected_changelog_content = changelog_file.read_text()

    # Remove any previous changelog to update
    os.remove(str(changelog_file.resolve()))

    # Act
    cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD]
    result = run_cli(cli_cmd[1:])

    # Evaluate
    assert_successful_exit_code(result, cli_cmd)

    # Check that the changelog file was re-created
    assert changelog_file.exists()

    actual_content = changelog_file.read_text()

    # Check that the changelog content is the same as before
    assert expected_changelog_content == actual_content
""" # Set the project configurations update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) # Remove the insertion flag from the changelog changelog_file.write_text( changelog_file.read_text().replace( f"{insertion_flag}\n", "", 1, ) ) # Capture the expected changelog content expected_changelog_content = changelog_file.read_text() # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD] result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Ensure changelog exists assert changelog_file.exists() actual_content = changelog_file.read_text() # Check that the changelog content is the same as before assert expected_changelog_content == actual_content @pytest.mark.parametrize( "changelog_format, changelog_file", [ ( ChangelogOutputFormat.MARKDOWN, lazy_fixture(changelog_md_file.__name__), ), ( ChangelogOutputFormat.RESTRUCTURED_TEXT, lazy_fixture(changelog_rst_file.__name__), ), ], ) @pytest.mark.parametrize( "repo_result", [ lazy_fixture(repo_fixture) for repo_fixture in [ # MUST HAVE at least 2 tags! repo_w_trunk_only_conventional_commits.__name__, repo_w_trunk_only_emoji_commits.__name__, repo_w_trunk_only_scipy_commits.__name__, ] ], ) def test_changelog_update_mode_no_header( repo_result: BuiltRepoResult, run_cli: RunCliFn, update_pyproject_toml: UpdatePyprojectTomlFn, changelog_format: ChangelogOutputFormat, changelog_file: Path, default_md_changelog_insertion_flag: str, default_rst_changelog_insertion_flag: str, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, ): """ Given a changelog template with the insertion flag at the beginning of the file, When the changelog command is run in "update" mode, Then the changelog is rebuilt with the latest release prepended to the existing content. 
""" repo = repo_result["repo"] # Mappings of correct fixtures to use based on the changelog format insertion_flags = { ChangelogOutputFormat.MARKDOWN: ( "# CHANGELOG{ls}{ls}{flag}".format( ls=os.linesep, flag=default_md_changelog_insertion_flag, ) ), ChangelogOutputFormat.RESTRUCTURED_TEXT: ( ".. _changelog:{ls}{ls}{h1_border}{ls}CHANGELOG{ls}{h1_border}{ls}{ls}{flag}".format( ls=os.linesep, h1_border="=" * 9, flag=default_rst_changelog_insertion_flag, ) ), } # Select the correct insertion flag based on the format insertion_flag = insertion_flags[changelog_format] # Set the project configurations update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) update_pyproject_toml( "tool.semantic_release.changelog.insertion_flag", insertion_flag, ) # Capture the expected changelog content of current release with changelog_file.open(newline=os.linesep) as rfd: expected_changelog_content = rfd.read() # Reset changelog file to last release previous_tag = f'v{get_versions_from_repo_build_def(repo_result["definition"])[-2]}' repo.git.checkout(previous_tag, "--", str(changelog_file.name)) # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD] result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Ensure changelog exists assert changelog_file.exists() with changelog_file.open(newline=os.linesep) as rfd: actual_content = rfd.read() # Check that the changelog content is the same as before assert expected_changelog_content == actual_content @pytest.mark.parametrize( "changelog_format, changelog_file, insertion_flag", [ ( ChangelogOutputFormat.MARKDOWN, lazy_fixture(example_changelog_md.__name__), lazy_fixture(default_md_changelog_insertion_flag.__name__), ), ( ChangelogOutputFormat.RESTRUCTURED_TEXT, lazy_fixture(example_changelog_rst.__name__), 
lazy_fixture(default_rst_changelog_insertion_flag.__name__), ), ], ) @pytest.mark.parametrize( "repo_result", [ lazy_fixture(repo_fixture) for repo_fixture in [ # MUST HAVE at least 2 tags! repo_w_trunk_only_conventional_commits.__name__, repo_w_trunk_only_emoji_commits.__name__, repo_w_trunk_only_scipy_commits.__name__, ] ], ) def test_changelog_update_mode_no_footer( repo_result: BuiltRepoResult, run_cli: RunCliFn, update_pyproject_toml: UpdatePyprojectTomlFn, changelog_format: ChangelogOutputFormat, changelog_file: Path, insertion_flag: str, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, ): """ Given a changelog template with the insertion flag at the end of the file, When the changelog command is run in "update" mode, Then the changelog is rebuilt with only the latest release. """ repo_result["repo"] # Mappings of correct fixtures to use based on the changelog format prev_version_tag = ( f"v{get_versions_from_repo_build_def(repo_result['definition'])[-2]}" ) split_flags = { ChangelogOutputFormat.MARKDOWN: f"\n\n## {prev_version_tag}", ChangelogOutputFormat.RESTRUCTURED_TEXT: f"\n\n.. 
_changelog-{prev_version_tag}:", } # Select the correct variable based on the format split_flag = split_flags[changelog_format] # Set the project configurations update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) # Capture the expected changelog content of current release (w/ universal newlines) # NOTE: universal newlines is fine because we use our split flag above is also universal expected_changelog_content = changelog_file.read_text().split(split_flag)[0] # Determine the contents to save while truncating the rest with changelog_file.open(newline=os.linesep) as rfd: # read file contents grabbing only the text before the insertion flag truncated_contents = str.join( "", [ rfd.read().split(insertion_flag)[0], insertion_flag, os.linesep, ], ) # Remove any text after the insertion flag # force output to not perform any newline translations with changelog_file.open(mode="w", newline="") as wfd: # overwrite the file with truncated contents wfd.write(truncated_contents) wfd.flush() # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD] result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Ensure changelog exists assert changelog_file.exists() actual_content = changelog_file.read_text() # Check that the changelog content only includes the latest release as there # is no previous release information as the insertion flag is at the end of the file assert expected_changelog_content == actual_content @pytest.mark.parametrize( "changelog_file, insertion_flag", [ ( # ChangelogOutputFormat.MARKDOWN lazy_fixture(example_changelog_md.__name__), lazy_fixture(default_md_changelog_insertion_flag.__name__), ), ( # ChangelogOutputFormat.RESTRUCTURED_TEXT lazy_fixture(example_changelog_rst.__name__), lazy_fixture(default_rst_changelog_insertion_flag.__name__), ), ], ) @pytest.mark.parametrize( 
"repo_result", [ lazy_fixture(repo_fixture) for repo_fixture in [ # Must not have a single release/tag repo_w_no_tags_conventional_commits.__name__, repo_w_no_tags_emoji_commits.__name__, repo_w_no_tags_scipy_commits.__name__, ] ], ) def test_changelog_update_mode_no_releases( repo_result: BuiltRepoResult, run_cli: RunCliFn, update_pyproject_toml: UpdatePyprojectTomlFn, changelog_file: Path, insertion_flag: str, ): """ Given the repository has no releases and the user has provided a initialized changelog, When the changelog command is run in "update" mode, Then the changelog is populated with unreleased changes. """ # Set the project configurations update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) # Custom text to maintain (must be different from the default) custom_text = "---{ls}{ls}Custom footer text{ls}".format(ls=os.linesep) # Capture and modify the current changelog content to become the expected output # We much use os.linesep here since the insertion flag is os-specific with changelog_file.open(newline=os.linesep) as rfd: initial_changelog_parts = rfd.read().split(insertion_flag) # content is os-specific because of the insertion flag & how we read the original file expected_changelog_content = str.join( insertion_flag, [ initial_changelog_parts[0], str.join( os.linesep, [ initial_changelog_parts[1], "", custom_text, ], ), ], ) # Grab the Unreleased changelog & create the initalized user changelog # force output to not perform any newline translations with changelog_file.open(mode="w", newline="") as wfd: wfd.write( str.join( insertion_flag, [initial_changelog_parts[0], f"{os.linesep * 2}{custom_text}"], ) ) # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD] result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Ensure changelog exists assert changelog_file.exists() # 
Capture the new changelog content (os aware because of expected content) with changelog_file.open(newline=os.linesep) as rfd: actual_content = rfd.read() # Check that the changelog footer is maintained and updated with Unreleased info assert expected_changelog_content == actual_content @pytest.mark.parametrize( "changelog_format, changelog_file, insertion_flag", [ ( ChangelogOutputFormat.MARKDOWN, lazy_fixture(example_changelog_md.__name__), lazy_fixture(default_md_changelog_insertion_flag.__name__), ), ( ChangelogOutputFormat.RESTRUCTURED_TEXT, lazy_fixture(example_changelog_rst.__name__), lazy_fixture(default_rst_changelog_insertion_flag.__name__), ), ], ) @pytest.mark.parametrize( "repo_result, commit_type", [ (lazy_fixture(repo_fixture), repo_fixture.split("_")[-2]) for repo_fixture in [ repo_w_trunk_only_conventional_commits.__name__, repo_w_trunk_only_emoji_commits.__name__, repo_w_trunk_only_scipy_commits.__name__, ] ], ) def test_changelog_update_mode_unreleased_n_released( repo_result: BuiltRepoResult, commit_type: CommitConvention, changelog_format: ChangelogOutputFormat, run_cli: RunCliFn, update_pyproject_toml: UpdatePyprojectTomlFn, example_git_ssh_url: str, file_in_repo: str, commit_n_rtn_changelog_entry: CommitNReturnChangelogEntryFn, changelog_file: Path, insertion_flag: str, get_commit_def_of_conventional_commit: GetCommitDefFn[ConventionalCommitParser], get_commit_def_of_emoji_commit: GetCommitDefFn[EmojiCommitParser], get_commit_def_of_scipy_commit: GetCommitDefFn[ScipyCommitParser], default_conventional_parser: ConventionalCommitParser, default_emoji_parser: EmojiCommitParser, default_scipy_parser: ScipyCommitParser, ): """ Given there are unreleased changes and a previous release in the changelog, When the changelog command is run in "update" mode, Then the changelog is only updated with the unreleased changes. 
""" repo = repo_result["repo"] # Set the project configurations update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) commit_n_section: Commit2Section = { "conventional": { "commit": get_commit_def_of_conventional_commit( "perf: improve the performance of the application", parser=default_conventional_parser, ), "section": "Performance Improvements", }, "emoji": { "commit": get_commit_def_of_emoji_commit( ":zap: improve the performance of the application", parser=default_emoji_parser, ), "section": ":zap:", }, "scipy": { "commit": get_commit_def_of_scipy_commit( "MAINT: fix an issue", parser=default_scipy_parser, ), "section": "Fix", }, } # Custom text to maintain (must be different from the default) custom_text = "---\n\nCustom footer text\n" # Update the changelog with the custom footer text changelog_file.write_text( str.join( "\n\n", [ changelog_file.read_text(), custom_text, ], ) ) # Capture the current changelog content so we can estimate the expected output # We much use os.linesep here since the insertion flag is os-specific with changelog_file.open(newline=os.linesep) as rfd: initial_changelog_parts = rfd.read().split(insertion_flag) # Make a change to the repo to create unreleased changes add_text_to_file(repo, file_in_repo) unreleased_commit_entry = commit_n_rtn_changelog_entry( repo, commit_n_section[commit_type]["commit"], ) with mock.patch.dict(os.environ, {}, clear=True): hvcs = Github(example_git_ssh_url, hvcs_domain=EXAMPLE_HVCS_DOMAIN) assert hvcs.repo_name # force caching of repo values (ignoring the env) unreleased_change_variants = { ChangelogOutputFormat.MARKDOWN: dedent( f""" ## Unreleased ### {commit_n_section[commit_type]["section"]} - {unreleased_commit_entry['desc'].capitalize()} ([`{unreleased_commit_entry['sha'][:7]}`]({hvcs.commit_hash_url(unreleased_commit_entry['sha'])})) """ ), 
ChangelogOutputFormat.RESTRUCTURED_TEXT: dedent( f""" .. _changelog-unreleased: Unreleased ========== {commit_n_section[commit_type]["section"]} {"-" * len(commit_n_section[commit_type]["section"])} * {unreleased_commit_entry['desc'].capitalize()} (`{unreleased_commit_entry['sha'][:7]}`_) .. _{unreleased_commit_entry['sha'][:7]}: {hvcs.commit_hash_url(unreleased_commit_entry['sha'])} """ ), } # Normalize line endings to the OS-specific line ending unreleased_changes = str.join( os.linesep, [ line.replace("\r", "") for line in unreleased_change_variants[changelog_format].split("\n") ], ) # Generate the expected changelog content (os aware because of insertion flag & initial parts) expected_changelog_content = str.join( insertion_flag, [ initial_changelog_parts[0], str.join( "", [ os.linesep, # Unreleased changes unreleased_changes, # Previous release notes initial_changelog_parts[1], ], ), ], ) # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD] result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Ensure changelog exists assert changelog_file.exists() # Capture the new changelog content (os aware because of expected content) with changelog_file.open(newline=os.linesep) as rfd: actual_content = rfd.read() # Check that the changelog content is the same as before assert expected_changelog_content == actual_content # Just need to test that it works for "a" project, not all @pytest.mark.usefixtures(repo_w_trunk_only_n_prereleases_conventional_commits.__name__) @pytest.mark.parametrize( "args", [("--post-to-release-tag", "v1.99.91910000000000000000000000000")] ) def test_changelog_release_tag_not_in_history( args: list[str], run_cli: RunCliFn, ): # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD, *args] result = run_cli(cli_cmd[1:]) # Evaluate assert_exit_code(2, result, cli_cmd) assert "not in release history" in result.stderr.lower() @pytest.mark.usefixtures(repo_w_trunk_only_n_prereleases_conventional_commits.__name__) 
@pytest.mark.parametrize( "args", [ ("--post-to-release-tag", "v0.1.0"), # first release ("--post-to-release-tag", "v0.1.1-rc.1"), # second release ("--post-to-release-tag", "v0.2.0"), # latest release ], ) def test_changelog_post_to_release(args: list[str], run_cli: RunCliFn): # Set up a requests HTTP session so we can catch the HTTP calls and ensure they're # made session = Session() session.hooks = {"response": [lambda r, *_, **__: r.raise_for_status()]} mock_adapter = requests_mock.Adapter() mock_adapter.register_uri( method=requests_mock.ANY, url=requests_mock.ANY, json={"id": 10001} ) session.mount("http://", mock_adapter) session.mount("https://", mock_adapter) expected_request_url = "{api_url}/repos/{owner}/{repo_name}/releases".format( api_url=f"https://{EXAMPLE_HVCS_DOMAIN}/api/v3", # GitHub API URL owner=EXAMPLE_REPO_OWNER, repo_name=EXAMPLE_REPO_NAME, ) # Patch out env vars that affect changelog URLs but only get set in e.g. # Github actions with mock.patch( # Patching the specific module's reference to the build_requests_session function f"{semantic_release.hvcs.github.__name__}.{semantic_release.hvcs.github.build_requests_session.__name__}", return_value=session, ) as build_requests_session_mock: # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD, *args] result = run_cli( cli_cmd[1:], env={ "CI": "true", "VIRTUAL_ENV": os.getenv("VIRTUAL_ENV", "./.venv"), }, ) # Evaluate assert_successful_exit_code(result, cli_cmd) assert build_requests_session_mock.called assert mock_adapter.called assert mock_adapter.last_request is not None assert expected_request_url == mock_adapter.last_request.url @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_trunk_only_n_prereleases_conventional_commits.__name__)], ) def test_custom_release_notes_template( repo_result: BuiltRepoResult, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, use_release_notes_template: UseReleaseNotesTemplateFn, retrieve_runtime_context: RetrieveRuntimeContextFn, 
post_mocker: Mocker, run_cli: RunCliFn, ) -> None: """Verify the template `.release_notes.md.j2` from `template_dir` is used.""" expected_call_count = 1 version = get_versions_from_repo_build_def(repo_result["definition"])[-1] # Setup use_release_notes_template() runtime_context = retrieve_runtime_context(repo_result["repo"]) release_history = get_release_history_from_context(runtime_context) release = release_history.released[version] tag = runtime_context.version_translator.str_to_tag(str(version)) expected_release_notes = ( runtime_context.template_environment.from_string(EXAMPLE_RELEASE_NOTES_TEMPLATE) .render(release=release) .rstrip() + os.linesep ) # ensure normalized line endings after render expected_release_notes = str.join( os.linesep, str.split(expected_release_notes.replace("\r", ""), "\n"), ) # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD, "--post-to-release-tag", tag] result = run_cli(cli_cmd[1:]) # Assert assert_successful_exit_code(result, cli_cmd) assert expected_call_count == post_mocker.call_count assert post_mocker.last_request is not None actual_notes = post_mocker.last_request.json()["body"] assert expected_release_notes == actual_notes @pytest.mark.usefixtures(repo_w_trunk_only_conventional_commits.__name__) def test_changelog_default_on_empty_template_dir( example_changelog_md: Path, changelog_template_dir: Path, example_project_template_dir: Path, update_pyproject_toml: UpdatePyprojectTomlFn, run_cli: RunCliFn, ): # Setup: Make sure default changelog doesn't already exist example_changelog_md.unlink(missing_ok=True) # Setup: Create an empty template directory example_project_template_dir.mkdir(parents=True, exist_ok=True) # Setup: Set the templates directory in the configuration update_pyproject_toml( "tool.semantic_release.changelog.template_dir", str(changelog_template_dir), ) # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD] result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Check that our 
default changelog was created because the user's template dir was empty assert example_changelog_md.exists() @pytest.mark.usefixtures(repo_w_trunk_only_conventional_commits.__name__) def test_changelog_default_on_incorrect_config_template_file( example_changelog_md: Path, changelog_template_dir: Path, example_project_template_dir: Path, update_pyproject_toml: UpdatePyprojectTomlFn, run_cli: RunCliFn, ): # Setup: Make sure default changelog doesn't already exist example_changelog_md.unlink(missing_ok=True) # Setup: Create a file of the same name as the template directory example_project_template_dir.parent.mkdir(parents=True, exist_ok=True) example_project_template_dir.touch() # Setup: Set the templates directory as the file in the configuration update_pyproject_toml( "tool.semantic_release.changelog.template_dir", str(changelog_template_dir), ) # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD] result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Check that our default changelog was created because the user's template dir was empty assert example_changelog_md.exists() @pytest.mark.parametrize("bad_changelog_file_str", ("/etc/passwd", "../../.ssh/id_rsa")) @pytest.mark.usefixtures(repo_w_trunk_only_conventional_commits.__name__) def test_changelog_prevent_malicious_path_traversal_file( update_pyproject_toml: UpdatePyprojectTomlFn, bad_changelog_file_str: str, run_cli: RunCliFn, ): # Setup: A malicious path traversal filepath outside of the repository update_pyproject_toml( "tool.semantic_release.changelog.changelog_file", bad_changelog_file_str, ) # Act cli_cmd = [MAIN_PROG_NAME, "--noop", CHANGELOG_SUBCMD] result = run_cli(cli_cmd[1:]) # Evaluate assert_exit_code(1, result, cli_cmd) assert ( "Changelog file destination must be inside of the repository directory." 
in result.stderr ) @pytest.mark.parametrize("template_dir_path", ("~/.ssh", "../../.ssh")) @pytest.mark.usefixtures(repo_w_trunk_only_conventional_commits.__name__) def test_changelog_prevent_external_path_traversal_dir( update_pyproject_toml: UpdatePyprojectTomlFn, template_dir_path: str, run_cli: RunCliFn, ): # Setup: A malicious path traversal filepath outside of the repository update_pyproject_toml( "tool.semantic_release.changelog.template_dir", template_dir_path, ) # Act cli_cmd = [MAIN_PROG_NAME, "--noop", CHANGELOG_SUBCMD] result = run_cli(cli_cmd[1:]) # Evaluate assert_exit_code(1, result, cli_cmd) assert ( "Template directory must be inside of the repository directory." in result.stderr ) python-semantic-release-10.4.1/tests/e2e/cmd_changelog/test_changelog_custom_parser.py000066400000000000000000000062041506116242600312370ustar00rootroot00000000000000from __future__ import annotations import os from typing import TYPE_CHECKING import pytest from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.changelog.context import ChangelogMode from tests.const import CHANGELOG_SUBCMD, MAIN_PROG_NAME from tests.fixtures.repos import repo_w_no_tags_conventional_commits from tests.util import ( CustomConventionalParserWithIgnorePatterns, assert_successful_exit_code, ) if TYPE_CHECKING: from pathlib import Path from semantic_release.commit_parser.conventional.parser import ( ConventionalCommitParser, ) from tests.conftest import RunCliFn from tests.fixtures.example_project import UpdatePyprojectTomlFn, UseCustomParserFn from tests.fixtures.git_repo import BuiltRepoResult, GetCommitDefFn @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_no_tags_conventional_commits.__name__)] ) def test_changelog_custom_parser_remove_from_changelog( repo_result: BuiltRepoResult, run_cli: RunCliFn, update_pyproject_toml: UpdatePyprojectTomlFn, use_custom_parser: UseCustomParserFn, get_commit_def_of_conventional_commit: 
GetCommitDefFn[ConventionalCommitParser], changelog_md_file: Path, default_md_changelog_insertion_flag: str, default_conventional_parser: ConventionalCommitParser, ): """ Given when a changelog filtering custom parser is configured When provided a commit message that matches the ignore syntax Then the commit message is not included in the resulting changelog """ ignored_commit_def = get_commit_def_of_conventional_commit( "chore: do not include me in the changelog", parser=default_conventional_parser, ) # Because we are in init mode, the insertion flag is not present in the changelog # we must take it out manually because our repo generation fixture includes it automatically with changelog_md_file.open(newline=os.linesep) as rfd: # use os.linesep here because the insertion flag is os-specific # but convert the content to universal newlines for comparison expected_changelog_content = ( rfd.read() .replace(f"{default_md_changelog_insertion_flag}{os.linesep}", "") .replace("\r", "") ) # Set the project configurations update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.INIT.value ) use_custom_parser( f"{CustomConventionalParserWithIgnorePatterns.__module__}:{CustomConventionalParserWithIgnorePatterns.__name__}" ) # Setup: add the commit to be ignored repo_result["repo"].git.commit(m=ignored_commit_def["msg"], a=True) # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD] result = run_cli(cli_cmd[1:]) # Take measurement after action actual_content = changelog_md_file.read_text() # Evaluate assert_successful_exit_code(result, cli_cmd) # Verify that the changelog content does not include our commit assert ignored_commit_def["desc"] not in actual_content # Verify that the changelog content has not changed assert expected_changelog_content == actual_content python-semantic-release-10.4.1/tests/e2e/cmd_changelog/test_changelog_parsing.py000066400000000000000000000106651506116242600300220ustar00rootroot00000000000000from __future__ import annotations 
import os import shutil from pathlib import Path from re import MULTILINE, compile as regexp from typing import TYPE_CHECKING import pytest from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.changelog.context import ChangelogMode from semantic_release.cli.const import JINJA2_EXTENSION from tests.const import CHANGELOG_SUBCMD, MAIN_PROG_NAME from tests.fixtures.example_project import ( default_changelog_md_template, default_changelog_rst_template, default_md_changelog_insertion_flag, default_rst_changelog_insertion_flag, example_changelog_md, example_changelog_rst, ) from tests.fixtures.repos.git_flow import ( repo_w_git_flow_conventional_commits, repo_w_git_flow_scipy_commits, ) from tests.util import assert_successful_exit_code if TYPE_CHECKING: from tests.conftest import RunCliFn from tests.fixtures.example_project import UpdatePyprojectTomlFn from tests.fixtures.git_repo import BuiltRepoResult @pytest.mark.parametrize( "changelog_file, insertion_flag, default_changelog_template, changes_tpl_file", [ ( # ChangelogOutputFormat.MARKDOWN lazy_fixture(example_changelog_md.__name__), lazy_fixture(default_md_changelog_insertion_flag.__name__), lazy_fixture(default_changelog_md_template.__name__), Path(".components", "changes.md.j2"), ), ( # ChangelogOutputFormat.RESTRUCTURED_TEXT lazy_fixture(example_changelog_rst.__name__), lazy_fixture(default_rst_changelog_insertion_flag.__name__), lazy_fixture(default_changelog_rst_template.__name__), Path(".components", "changes.rst.j2"), ), ], ) @pytest.mark.parametrize( "repo_result", [ pytest.param( lazy_fixture(repo_fixture_name), marks=pytest.mark.comprehensive, ) for repo_fixture_name in [ repo_w_git_flow_conventional_commits.__name__, repo_w_git_flow_scipy_commits.__name__, ] ], ) def test_changelog_parsing_ignore_merge_commits( run_cli: RunCliFn, repo_result: BuiltRepoResult, update_pyproject_toml: UpdatePyprojectTomlFn, example_project_template_dir: Path, changelog_file: Path, 
insertion_flag: str, default_changelog_template: Path, changes_tpl_file: Path, ): repo = repo_result["repo"] expected_changelog_content = changelog_file.read_text() update_pyproject_toml( "tool.semantic_release.commit_parser_options.ignore_merge_commits", True ) update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.insertion_flag", insertion_flag, ) update_pyproject_toml( "tool.semantic_release.changelog.template_dir", str(example_project_template_dir.relative_to(repo.working_dir)), ) update_pyproject_toml( "tool.semantic_release.changelog.exclude_commit_patterns", [ r"""Initial Commit.*""", ], ) # Force custom changelog to be a copy of the default changelog shutil.copytree( src=default_changelog_template.parent, dst=example_project_template_dir, dirs_exist_ok=True, ) # Remove the "unknown" filter from the changelog template to enable Merge commits patch = regexp( r'^(#}{% *for type_, commits in commit_objects) if type_ != "unknown"', MULTILINE, ) changes_file = example_project_template_dir.joinpath(changes_tpl_file) changes_file.write_text(patch.sub(r"\1", changes_file.read_text())) # Make sure the prev_changelog_file is the same as the current changelog changelog_tpl_file = example_project_template_dir.joinpath( changelog_file.name ).with_suffix(str.join("", [changelog_file.suffix, JINJA2_EXTENSION])) changelog_tpl_file.write_text( regexp(r"= ctx.prev_changelog_file").sub( rf'= "{changelog_file.name}"', changelog_tpl_file.read_text() ) ) # Remove the changelog to force re-generation with new configurations os.remove(str(changelog_file.resolve())) # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD] result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) assert expected_changelog_content == changelog_file.read_text() 
python-semantic-release-10.4.1/tests/e2e/cmd_changelog/test_changelog_release_notes.py000066400000000000000000000264351506116242600312110ustar00rootroot00000000000000from __future__ import annotations from datetime import datetime from typing import TYPE_CHECKING import pytest from pytest_lazy_fixtures import lf as lazy_fixture from tests.const import CHANGELOG_SUBCMD, EXAMPLE_PROJECT_LICENSE, MAIN_PROG_NAME from tests.fixtures.repos import ( repo_w_github_flow_w_default_release_channel_conventional_commits, repo_w_github_flow_w_feature_release_channel_conventional_commits, repo_w_trunk_only_conventional_commits, repo_w_trunk_only_emoji_commits, repo_w_trunk_only_scipy_commits, ) from tests.util import assert_successful_exit_code if TYPE_CHECKING: from requests_mock import Mocker from tests.conftest import GetCachedRepoDataFn, GetStableDateNowFn, RunCliFn from tests.fixtures.example_project import UpdatePyprojectTomlFn from tests.fixtures.git_repo import ( BuiltRepoResult, GenerateDefaultReleaseNotesFromDefFn, GetCfgValueFromDefFn, GetHvcsClientFromRepoDefFn, GetVersionsFromRepoBuildDefFn, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_result", [ lazy_fixture(repo_fixture_name) for repo_fixture_name in [ repo_w_trunk_only_conventional_commits.__name__, ] ], ) def test_changelog_latest_release_notes( repo_result: BuiltRepoResult, get_cfg_value_from_def: GetCfgValueFromDefFn, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, get_hvcs_client_from_repo_def: GetHvcsClientFromRepoDefFn, run_cli: RunCliFn, post_mocker: Mocker, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, generate_default_release_notes_from_def: GenerateDefaultReleaseNotesFromDefFn, ): # Setup repo_def = repo_result["definition"] tag_format_str: str = get_cfg_value_from_def(repo_def, "tag_format_str") # type: ignore[assignment] repo_actions_per_version = split_repo_actions_by_release_tags( repo_definition=repo_def ) all_versions = 
get_versions_from_repo_build_def(repo_def) latest_release_version = all_versions[-1] release_tag = tag_format_str.format(version=latest_release_version) expected_release_notes = generate_default_release_notes_from_def( version_actions=repo_actions_per_version[latest_release_version], hvcs=get_hvcs_client_from_repo_def(repo_def), previous_version=(all_versions[-2] if len(all_versions) > 1 else None), license_name=EXAMPLE_PROJECT_LICENSE, mask_initial_release=get_cfg_value_from_def(repo_def, "mask_initial_release"), ) # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD, "--post-to-release-tag", release_tag] result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) assert post_mocker.call_count == 1 assert post_mocker.last_request is not None request_body = post_mocker.last_request.json() assert "body" in request_body actual_posted_notes = request_body["body"] assert expected_release_notes == actual_posted_notes @pytest.mark.parametrize( "repo_result, mask_initial_release", [ ( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), True, ), pytest.param( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), False, marks=pytest.mark.comprehensive, ), *[ pytest.param( lazy_fixture(repo_fixture_name), mask_initial_release, marks=pytest.mark.comprehensive, ) for mask_initial_release in [True, False] for repo_fixture_name in [ repo_w_github_flow_w_default_release_channel_conventional_commits.__name__, ] ], ], ) def test_changelog_previous_release_notes( repo_result: BuiltRepoResult, mask_initial_release: bool, get_cfg_value_from_def: GetCfgValueFromDefFn, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, get_hvcs_client_from_repo_def: GetHvcsClientFromRepoDefFn, run_cli: RunCliFn, post_mocker: Mocker, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, generate_default_release_notes_from_def: GenerateDefaultReleaseNotesFromDefFn, update_pyproject_toml: UpdatePyprojectTomlFn, ): # Setup repo_def = 
repo_result["definition"] tag_format_str: str = get_cfg_value_from_def(repo_def, "tag_format_str") # type: ignore[assignment] repo_actions_per_version = split_repo_actions_by_release_tags( repo_definition=repo_def ) # Extract all versions except for the latest one all_prev_versions = get_versions_from_repo_build_def(repo_def)[:-1] latest_release_version = all_prev_versions[-1] release_tag = tag_format_str.format(version=latest_release_version) expected_release_notes = generate_default_release_notes_from_def( version_actions=repo_actions_per_version[latest_release_version], hvcs=get_hvcs_client_from_repo_def(repo_def), previous_version=( all_prev_versions[-2] if len(all_prev_versions) > 1 else None ), license_name=EXAMPLE_PROJECT_LICENSE, mask_initial_release=mask_initial_release, ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.mask_initial_release", mask_initial_release, ) # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD, "--post-to-release-tag", release_tag] result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) assert post_mocker.call_count == 1 assert post_mocker.last_request is not None request_body = post_mocker.last_request.json() assert "body" in request_body actual_posted_notes = request_body["body"] assert expected_release_notes == actual_posted_notes @pytest.mark.parametrize( "repo_result, repo_fixture_name, mask_initial_release, license_name", [ ( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), repo_w_trunk_only_conventional_commits.__name__, True, "BSD-3-Clause", ), pytest.param( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), repo_w_trunk_only_conventional_commits.__name__, False, "BSD-3-Clause", marks=pytest.mark.comprehensive, ), *[ pytest.param( lazy_fixture(repo_fixture_name), repo_fixture_name, mask_initial_release, "BSD-3-Clause", marks=pytest.mark.comprehensive, ) for mask_initial_release in [True, False] for repo_fixture_name in [ 
repo_w_trunk_only_emoji_commits.__name__, repo_w_trunk_only_scipy_commits.__name__, # Add more repos here if needed # github_flow had issues as its hard to generate the release notes from squash commits repo_w_github_flow_w_feature_release_channel_conventional_commits.__name__, ] ], ], ) def test_changelog_release_notes_license_change( repo_result: BuiltRepoResult, license_name: str, mask_initial_release: bool, get_cfg_value_from_def: GetCfgValueFromDefFn, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, get_hvcs_client_from_repo_def: GetHvcsClientFromRepoDefFn, run_cli: RunCliFn, post_mocker: Mocker, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, generate_default_release_notes_from_def: GenerateDefaultReleaseNotesFromDefFn, update_pyproject_toml: UpdatePyprojectTomlFn, repo_fixture_name: str, stable_now_date: GetStableDateNowFn, get_cached_repo_data: GetCachedRepoDataFn, ): # Setup repo_def = repo_result["definition"] tag_format_str: str = get_cfg_value_from_def(repo_def, "tag_format_str") # type: ignore[assignment] if not (repo_build_data := get_cached_repo_data(repo_fixture_name)): pytest.fail("Repo build date not found in cache") repo_build_datetime = datetime.strptime(repo_build_data["build_date"], "%Y-%m-%d") now_datetime = stable_now_date().replace( year=repo_build_datetime.year, month=repo_build_datetime.month, day=repo_build_datetime.day, ) repo_actions_per_version = split_repo_actions_by_release_tags( repo_definition=repo_def, ) # Extract all versions all_versions = get_versions_from_repo_build_def(repo_def) assert len(all_versions) > 1 latest_release_version = all_versions[-1] previous_release_version = all_versions[-2] latest_release_tag = tag_format_str.format(version=latest_release_version) prev_release_tag = tag_format_str.format(version=previous_release_version) expected_release_notes = generate_default_release_notes_from_def( version_actions=repo_actions_per_version[latest_release_version], 
hvcs=get_hvcs_client_from_repo_def(repo_def), previous_version=(previous_release_version if len(all_versions) > 1 else None), license_name=license_name, mask_initial_release=mask_initial_release, ) expected_prev_release_notes = generate_default_release_notes_from_def( version_actions=repo_actions_per_version[previous_release_version], hvcs=get_hvcs_client_from_repo_def(repo_def), previous_version=(all_versions[-3] if len(all_versions) > 2 else None), license_name=EXAMPLE_PROJECT_LICENSE, mask_initial_release=mask_initial_release, ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.mask_initial_release", mask_initial_release, ) update_pyproject_toml("project.license-expression", license_name) git_repo = repo_result["repo"] git_repo.git.commit( amend=True, a=True, no_edit=True, date=now_datetime.isoformat(timespec="seconds"), ) with git_repo.git.custom_environment( GIT_COMMITTER_DATE=now_datetime.isoformat(timespec="seconds"), ): git_repo.git.tag(latest_release_tag, d=True) git_repo.git.tag(latest_release_tag, a=True, m=latest_release_tag) # Act cli_cmd = [ MAIN_PROG_NAME, CHANGELOG_SUBCMD, "--post-to-release-tag", latest_release_tag, ] result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) assert post_mocker.call_count == 1 assert post_mocker.last_request is not None request_body = post_mocker.last_request.json() assert "body" in request_body actual_new_posted_notes = request_body["body"] assert expected_release_notes == actual_new_posted_notes # Generate the previous release notes cli_cmd = [ MAIN_PROG_NAME, CHANGELOG_SUBCMD, "--post-to-release-tag", prev_release_tag, ] result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) assert post_mocker.call_count == 2 assert post_mocker.last_request is not None request_body = post_mocker.last_request.json() assert "body" in request_body actual_prev_posted_notes = request_body["body"] assert expected_prev_release_notes == 
actual_prev_posted_notes assert actual_prev_posted_notes != actual_new_posted_notes python-semantic-release-10.4.1/tests/e2e/cmd_config/000077500000000000000000000000001506116242600222455ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_config/__init__.py000066400000000000000000000000001506116242600243440ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_config/test_generate_config.py000066400000000000000000000125171506116242600270030ustar00rootroot00000000000000from __future__ import annotations import json from typing import TYPE_CHECKING import pytest import tomlkit from semantic_release.cli.config import RawConfig from tests.const import GENERATE_CONFIG_SUBCMD, MAIN_PROG_NAME, VERSION_SUBCMD from tests.fixtures.repos import repo_w_no_tags_conventional_commits from tests.util import assert_successful_exit_code if TYPE_CHECKING: from pathlib import Path from typing import Any from tests.conftest import RunCliFn from tests.fixtures.example_project import ExProjectDir @pytest.fixture def raw_config_dict() -> dict[str, Any]: return RawConfig().model_dump(mode="json", exclude_none=True) @pytest.mark.parametrize("args", [(), ("--format", "toml"), ("--format", "TOML")]) @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_generate_config_toml( run_cli: RunCliFn, args: tuple[str], raw_config_dict: dict[str, Any], example_project_dir: ExProjectDir, ): # Setup: Generate the expected configuration as a TOML string expected_config_as_str = tomlkit.dumps( {"semantic_release": raw_config_dict} ).strip() # Act: Print the generated configuration to stdout cli_cmd = [MAIN_PROG_NAME, GENERATE_CONFIG_SUBCMD, *args] result = run_cli(cli_cmd[1:]) # Evaluate: Check that the command ran successfully and that the output matches the expected configuration assert_successful_exit_code(result, cli_cmd) assert expected_config_as_str == result.output.strip() # Setup: Write the generated configuration to a file 
config_file = "releaserc.toml" example_project_dir.joinpath(config_file).write_text(result.output) # Act: Validate that the generated config is a valid configuration for PSR cli_cmd = [ MAIN_PROG_NAME, "--noop", "--strict", "-c", config_file, VERSION_SUBCMD, "--print", ] result = run_cli(cli_cmd[1:]) # Evaluate: Check that the version command in noop mode ran successfully # which means PSR loaded the configuration successfully assert_successful_exit_code(result, cli_cmd) @pytest.mark.parametrize("args", [("--format", "json"), ("--format", "JSON")]) @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_generate_config_json( run_cli: RunCliFn, args: tuple[str], raw_config_dict: dict[str, Any], example_project_dir: ExProjectDir, ): # Setup: Generate the expected configuration as a JSON string expected_config_as_str = json.dumps( {"semantic_release": raw_config_dict}, indent=4 ).strip() # Act: Print the generated configuration to stdout cli_cmd = [MAIN_PROG_NAME, GENERATE_CONFIG_SUBCMD, *args] result = run_cli(cli_cmd[1:]) # Evaluate: Check that the command ran successfully and that the output matches the expected configuration assert_successful_exit_code(result, cli_cmd) assert expected_config_as_str == result.output.strip() # Setup: Write the generated configuration to a file config_file = "releaserc.json" example_project_dir.joinpath(config_file).write_text(result.output) # Act: Validate that the generated config is a valid configuration for PSR cli_cmd = [ MAIN_PROG_NAME, "--noop", "--strict", "-c", config_file, VERSION_SUBCMD, "--print", ] result = run_cli(cli_cmd[1:]) # Evaluate: Check that the version command in noop mode ran successfully # which means PSR loaded the configuration successfully assert_successful_exit_code(result, cli_cmd) @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_generate_config_pyproject_toml( run_cli: RunCliFn, raw_config_dict: dict[str, Any], example_pyproject_toml: Path, ): # 
Setup: Generate the expected configuration as a TOML string according to PEP 518 expected_config_as_str = tomlkit.dumps( {"tool": {"semantic_release": raw_config_dict}} ).strip() # Setup: Remove any current configuration from pyproject.toml pyproject_config = tomlkit.loads(example_pyproject_toml.read_text(encoding="utf-8")) pyproject_config.get("tool", {}).pop("semantic_release", None) example_pyproject_toml.write_text(tomlkit.dumps(pyproject_config)) # Act: Print the generated configuration to stdout cli_cmd = [ MAIN_PROG_NAME, GENERATE_CONFIG_SUBCMD, "--format", "toml", "--pyproject", ] result = run_cli(cli_cmd[1:]) # Evaluate: Check that the command ran successfully and that the output matches the expected configuration assert_successful_exit_code(result, cli_cmd) assert expected_config_as_str == result.output.strip() # Setup: Write the generated configuration to a file example_pyproject_toml.write_text( str.join( "\n\n", [ example_pyproject_toml.read_text(encoding="utf-8").strip(), result.output, ], ) ) # Act: Validate that the generated config is a valid configuration for PSR cli_cmd = [MAIN_PROG_NAME, "--noop", "--strict", VERSION_SUBCMD, "--print"] result = run_cli(cli_cmd[1:]) # Evaluate: Check that the version command in noop mode ran successfully # which means PSR loaded the configuration successfully assert_successful_exit_code(result, cli_cmd) python-semantic-release-10.4.1/tests/e2e/cmd_publish/000077500000000000000000000000001506116242600224465ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_publish/__init__.py000066400000000000000000000000001506116242600245450ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_publish/test_publish.py000066400000000000000000000056751506116242600255420ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING from unittest import mock import pytest from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from 
semantic_release.hvcs import Github from tests.const import MAIN_PROG_NAME, PUBLISH_SUBCMD from tests.fixtures.repos import repo_w_trunk_only_conventional_commits from tests.util import assert_exit_code, assert_successful_exit_code if TYPE_CHECKING: from typing import Sequence from tests.conftest import RunCliFn from tests.fixtures.git_repo import BuiltRepoResult, GetVersionsFromRepoBuildDefFn @pytest.mark.parametrize("cmd_args", [(), ("--tag", "latest")]) @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_trunk_only_conventional_commits.__name__)] ) def test_publish_latest_uses_latest_tag( repo_result: BuiltRepoResult, run_cli: RunCliFn, cmd_args: Sequence[str], get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, ): latest_version = get_versions_from_repo_build_def(repo_result["definition"])[-1] latest_tag = f"v{latest_version}" with mock.patch.object( Github, Github.upload_dists.__name__, ) as mocked_upload_dists: cli_cmd = [MAIN_PROG_NAME, PUBLISH_SUBCMD, *cmd_args] # Act result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) mocked_upload_dists.assert_called_once_with(tag=latest_tag, dist_glob="dist/*") @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_trunk_only_conventional_commits.__name__)] ) def test_publish_to_tag_uses_tag( repo_result: BuiltRepoResult, run_cli: RunCliFn, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, ): # Testing a non-latest tag to distinguish from test_publish_latest_uses_latest_tag() previous_version = get_versions_from_repo_build_def(repo_result["definition"])[-2] previous_tag = f"v{previous_version}" with mock.patch.object(Github, Github.upload_dists.__name__) as mocked_upload_dists: cli_cmd = [MAIN_PROG_NAME, PUBLISH_SUBCMD, "--tag", previous_tag] # Act result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) mocked_upload_dists.assert_called_once_with( tag=previous_tag, dist_glob="dist/*" ) 
@pytest.mark.usefixtures(repo_w_trunk_only_conventional_commits.__name__) def test_publish_fails_on_nonexistant_tag(run_cli: RunCliFn): non_existant_tag = "nonexistant-tag" with mock.patch.object(Github, Github.upload_dists.__name__) as mocked_upload_dists: cli_cmd = [MAIN_PROG_NAME, PUBLISH_SUBCMD, "--tag", non_existant_tag] # Act result = run_cli(cli_cmd[1:]) # Evaluate assert_exit_code(1, result, cli_cmd) assert ( f"Tag '{non_existant_tag}' not found in local repository!" in result.stderr ) mocked_upload_dists.assert_not_called() python-semantic-release-10.4.1/tests/e2e/cmd_version/000077500000000000000000000000001506116242600224655ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_version/__init__.py000066400000000000000000000000001506116242600245640ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/000077500000000000000000000000001506116242600251755ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/__init__.py000066400000000000000000000000001506116242600272740ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/conftest.py000066400000000000000000000116051506116242600273770ustar00rootroot00000000000000from __future__ import annotations from pathlib import Path from typing import TYPE_CHECKING import pytest from git import Repo from semantic_release.hvcs.github import Github from tests.const import MAIN_PROG_NAME, VERSION_SUBCMD from tests.util import assert_successful_exit_code if TYPE_CHECKING: from typing import Protocol, Sequence from click.testing import Result from tests.conftest import RunCliFn from tests.fixtures.example_project import UpdatePyprojectTomlFn from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, RepoActionConfigure, RepoActionConfigureMonorepo, RepoActionCreateMonorepo, ) class InitMirrorRepo4RebuildFn(Protocol): def __call__( self, mirror_repo_dir: Path, configuration_steps: 
Sequence[ RepoActionConfigure | RepoActionCreateMonorepo | RepoActionConfigureMonorepo ], files_to_remove: Sequence[Path], ) -> Path: ... class RunPSReleaseFn(Protocol): def __call__( self, next_version_str: str, git_repo: Repo, config_toml_path: Path = ..., ) -> Result: ... @pytest.fixture(scope="session") def init_mirror_repo_for_rebuild( build_repo_from_definition: BuildRepoFromDefinitionFn, ) -> InitMirrorRepo4RebuildFn: def _init_mirror_repo_for_rebuild( mirror_repo_dir: Path, configuration_steps: Sequence[ RepoActionConfigure | RepoActionCreateMonorepo | RepoActionConfigureMonorepo ], files_to_remove: Sequence[Path], ) -> Path: # Create the mirror repo directory mirror_repo_dir.mkdir(exist_ok=True, parents=True) # Initialize mirror repository build_repo_from_definition( dest_dir=mirror_repo_dir, repo_construction_steps=configuration_steps, ) with Repo(mirror_repo_dir) as mirror_git_repo: for filepath in files_to_remove: file = ( (mirror_git_repo.working_dir / filepath).resolve().absolute() if not filepath.is_absolute() else filepath ) if ( Path(mirror_git_repo.working_dir) not in file.parents or not file.exists() ): continue mirror_git_repo.git.rm(str(file), force=True) return mirror_repo_dir return _init_mirror_repo_for_rebuild @pytest.fixture(scope="session") def run_psr_release( run_cli: RunCliFn, changelog_rst_file: Path, update_pyproject_toml: UpdatePyprojectTomlFn, pyproject_toml_file: Path, ) -> RunPSReleaseFn: base_version_cmd = [MAIN_PROG_NAME, "--strict", VERSION_SUBCMD] write_changelog_only_cmd = [ *base_version_cmd, "--changelog", "--no-commit", "--no-tag", "--skip-build", ] def _run_psr_release( next_version_str: str, git_repo: Repo, config_toml_path: Path = pyproject_toml_file, ) -> Result: version_n_buildmeta = next_version_str.split("+", maxsplit=1) version_n_prerelease = version_n_buildmeta[0].split("-", maxsplit=1) build_metadata_args = ( ["--build-metadata", version_n_buildmeta[-1]] if len(version_n_buildmeta) > 1 else [] ) prerelease_args 
= ( [ "--as-prerelease", "--prerelease-token", version_n_prerelease[-1].split(".", maxsplit=1)[0], ] if len(version_n_prerelease) > 1 else [] ) # Initial run to write the RST changelog # 1. configure PSR to write the RST changelog with the RST default insertion flag update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_rst_file), toml_file=config_toml_path, ) cli_cmd = [*write_changelog_only_cmd, *prerelease_args, *build_metadata_args] result = run_cli(cli_cmd[1:], env={Github.DEFAULT_ENV_TOKEN_NAME: "1234"}) assert_successful_exit_code(result, cli_cmd) # Reset the index in case PSR added anything to the index git_repo.git.reset("--mixed", "HEAD") # Add the changelog file to the git index but reset the working directory git_repo.git.add(str(changelog_rst_file.resolve())) git_repo.git.checkout("--", ".") # Actual run to release & write the MD changelog cli_cmd = [ *base_version_cmd, *prerelease_args, *build_metadata_args, ] result = run_cli(cli_cmd[1:], env={Github.DEFAULT_ENV_TOKEN_NAME: "1234"}) assert_successful_exit_code(result, cli_cmd) return result return _run_psr_release python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/git_flow/000077500000000000000000000000001506116242600270075ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/git_flow/__init__.py000066400000000000000000000000001506116242600311060ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/git_flow/test_repo_1_channel.py000066400000000000000000000146551506116242600333100ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING, cast import pytest from freezegun import freeze_time from tests.fixtures.repos.git_flow import ( repo_w_git_flow_conventional_commits, repo_w_git_flow_emoji_commits, repo_w_git_flow_scipy_commits, ) from tests.util import temporary_working_directory if TYPE_CHECKING: from pathlib 
import Path from unittest.mock import MagicMock from requests_mock import Mocker from semantic_release.version.version import Version from tests.e2e.cmd_version.bump_version.conftest import ( InitMirrorRepo4RebuildFn, RunPSReleaseFn, ) from tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildSpecificRepoFn, CommitConvention, GetGitRepo4DirFn, RepoActionRelease, RepoActions, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_fixture_name", [ pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive) for repo_fixture_name in [ repo_w_git_flow_conventional_commits.__name__, repo_w_git_flow_emoji_commits.__name__, repo_w_git_flow_scipy_commits.__name__, ] ], ) def test_gitflow_repo_rebuild_1_channel( repo_fixture_name: str, run_psr_release: RunPSReleaseFn, build_git_flow_repo_w_1_release_channels: BuildSpecificRepoFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn, example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, build_repo_from_definition: BuildRepoFromDefinitionFn, mocked_git_push: MagicMock, post_mocker: Mocker, version_py_file: Path, get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn, get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn, pyproject_toml_file: Path, changelog_md_file: Path, changelog_rst_file: Path, ): # build target repo into a temporary directory target_repo_dir = example_project_dir / repo_fixture_name commit_type: CommitConvention = ( repo_fixture_name.split("commits", 1)[0].split("_")[-2] # type: ignore[assignment] ) target_repo_definition = build_git_flow_repo_w_1_release_channels( repo_name=repo_fixture_name, commit_type=commit_type, dest_dir=target_repo_dir, ) target_git_repo = git_repo_for_directory(target_repo_dir) # split repo actions by release actions 
organized_steps = split_repo_actions_by_release_tags(target_repo_definition) configuration_steps = organized_steps.pop(None) unreleased_steps = organized_steps.pop("Unreleased", None) if unreleased_steps: raise ValueError("Unreleased steps found. Not Supported yet!") release_tags_2_steps = cast("dict[Version, list[RepoActions]]", organized_steps) # Create the mirror repo directory mirror_repo_dir = init_mirror_repo_for_rebuild( mirror_repo_dir=(example_project_dir / "mirror"), configuration_steps=configuration_steps, # type: ignore[arg-type] files_to_remove=[ changelog_md_file, changelog_rst_file, ], ) mirror_git_repo = git_repo_for_directory(mirror_repo_dir) # rebuild repo from scratch stopping before each release tag for curr_version, steps in release_tags_2_steps.items(): curr_release_tag = curr_version.as_tag() # make sure mocks are clear mocked_git_push.reset_mock() post_mocker.reset_mock() # Extract expected result from target repo target_git_repo.git.checkout(curr_release_tag, detach=True) expected_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir ) expected_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=target_repo_dir ) expected_pyproject_toml_content = ( target_repo_dir / pyproject_toml_file ).read_text() expected_version_file_content = (target_repo_dir / version_py_file).read_text() expected_release_commit_text = target_git_repo.head.commit.message # In our repo env, start building the repo from the definition build_repo_from_definition( dest_dir=mirror_repo_dir, repo_construction_steps=steps[:-1], # stop before the release step ) release_action_step: RepoActionRelease = steps[-1] # type: ignore[assignment] # Act: run PSR on the repo instead of the RELEASE step with freeze_time( release_action_step["details"]["datetime"] ), temporary_working_directory(mirror_repo_dir): run_psr_release( next_version_str=release_action_step["details"]["version"], git_repo=mirror_git_repo, ) # take measurement after 
running the version command actual_release_commit_text = mirror_git_repo.head.commit.message actual_pyproject_toml_content = ( mirror_repo_dir / pyproject_toml_file ).read_text() actual_version_file_content = (mirror_repo_dir / version_py_file).read_text() actual_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir ) actual_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir ) # Evaluate (normal release actions should have occurred as expected) # ------------------------------------------------------------------ # Make sure version file is updated assert expected_pyproject_toml_content == actual_pyproject_toml_content assert expected_version_file_content == actual_version_file_content # Make sure changelog is updated assert expected_md_changelog_content == actual_md_changelog_content assert expected_rst_changelog_content == actual_rst_changelog_content # Make sure commit is created assert expected_release_commit_text == actual_release_commit_text # Make sure tag is created assert curr_release_tag in [tag.name for tag in mirror_git_repo.tags] assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/git_flow/test_repo_2_channels.py000066400000000000000000000150621506116242600334650ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING, cast import pytest from freezegun import freeze_time from tests.fixtures.repos.git_flow import ( repo_w_git_flow_w_alpha_prereleases_n_conventional_commits, repo_w_git_flow_w_alpha_prereleases_n_emoji_commits, repo_w_git_flow_w_alpha_prereleases_n_scipy_commits, ) from tests.util import temporary_working_directory if TYPE_CHECKING: from pathlib import Path from unittest.mock import MagicMock from requests_mock import Mocker from semantic_release.version.version import Version from 
tests.e2e.cmd_version.bump_version.conftest import ( InitMirrorRepo4RebuildFn, RunPSReleaseFn, ) from tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildSpecificRepoFn, CommitConvention, GetGitRepo4DirFn, RepoActionRelease, RepoActions, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_fixture_name", [ pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive) for repo_fixture_name in [ repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__, repo_w_git_flow_w_alpha_prereleases_n_emoji_commits.__name__, repo_w_git_flow_w_alpha_prereleases_n_scipy_commits.__name__, ] ], ) def test_gitflow_repo_rebuild_2_channels( repo_fixture_name: str, run_psr_release: RunPSReleaseFn, build_git_flow_repo_w_2_release_channels: BuildSpecificRepoFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn, example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, build_repo_from_definition: BuildRepoFromDefinitionFn, mocked_git_push: MagicMock, post_mocker: Mocker, version_py_file: Path, get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn, get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn, pyproject_toml_file: Path, changelog_md_file: Path, changelog_rst_file: Path, ): # build target repo into a temporary directory target_repo_dir = example_project_dir / repo_fixture_name commit_type: CommitConvention = ( repo_fixture_name.split("commits", 1)[0].split("_")[-2] # type: ignore[assignment] ) target_repo_definition = build_git_flow_repo_w_2_release_channels( repo_name=repo_fixture_name, commit_type=commit_type, dest_dir=target_repo_dir, ) target_git_repo = git_repo_for_directory(target_repo_dir) # split repo actions by release actions organized_steps = 
split_repo_actions_by_release_tags(target_repo_definition) configuration_steps = organized_steps.pop(None) unreleased_steps = organized_steps.pop("Unreleased", None) if unreleased_steps: raise ValueError("Unreleased steps found. Not Supported yet!") release_tags_2_steps = cast("dict[Version, list[RepoActions]]", organized_steps) # Create the mirror repo directory mirror_repo_dir = init_mirror_repo_for_rebuild( mirror_repo_dir=(example_project_dir / "mirror"), configuration_steps=configuration_steps, # type: ignore[arg-type] files_to_remove=[ changelog_md_file, changelog_rst_file, ], ) mirror_git_repo = git_repo_for_directory(mirror_repo_dir) # rebuild repo from scratch stopping before each release tag for curr_version, steps in release_tags_2_steps.items(): curr_release_tag = curr_version.as_tag() # make sure mocks are clear mocked_git_push.reset_mock() post_mocker.reset_mock() # Extract expected result from target repo target_git_repo.git.checkout(curr_release_tag, detach=True) expected_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir ) expected_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=target_repo_dir ) expected_pyproject_toml_content = ( target_repo_dir / pyproject_toml_file ).read_text() expected_version_file_content = (target_repo_dir / version_py_file).read_text() expected_release_commit_text = target_git_repo.head.commit.message # In our repo env, start building the repo from the definition build_repo_from_definition( dest_dir=mirror_repo_dir, repo_construction_steps=steps[:-1], # stop before the release step ) release_action_step: RepoActionRelease = steps[-1] # type: ignore[assignment] # Act: run PSR on the repo instead of the RELEASE step with freeze_time( release_action_step["details"]["datetime"] ), temporary_working_directory(mirror_repo_dir): run_psr_release( next_version_str=release_action_step["details"]["version"], git_repo=mirror_git_repo, ) # take measurement after running the version 
command actual_release_commit_text = mirror_git_repo.head.commit.message actual_pyproject_toml_content = ( mirror_repo_dir / pyproject_toml_file ).read_text() actual_version_file_content = (mirror_repo_dir / version_py_file).read_text() actual_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir ) actual_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir ) # Evaluate (normal release actions should have occurred as expected) # ------------------------------------------------------------------ # Make sure version file is updated assert expected_pyproject_toml_content == actual_pyproject_toml_content assert expected_version_file_content == actual_version_file_content # Make sure changelog is updated assert expected_md_changelog_content == actual_md_changelog_content assert expected_rst_changelog_content == actual_rst_changelog_content # Make sure commit is created assert expected_release_commit_text == actual_release_commit_text # Make sure tag is created assert curr_release_tag in [tag.name for tag in mirror_git_repo.tags] assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/git_flow/test_repo_3_channels.py000066400000000000000000000154141506116242600334670ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING, cast import pytest from freezegun import freeze_time from tests.fixtures.repos.git_flow import ( repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits, repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format, repo_w_git_flow_w_rc_n_alpha_prereleases_n_emoji_commits, repo_w_git_flow_w_rc_n_alpha_prereleases_n_scipy_commits, ) from tests.util import temporary_working_directory if TYPE_CHECKING: from pathlib import Path from unittest.mock import MagicMock from requests_mock import 
Mocker from semantic_release.version.version import Version from tests.e2e.cmd_version.bump_version.conftest import ( InitMirrorRepo4RebuildFn, RunPSReleaseFn, ) from tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildSpecificRepoFn, CommitConvention, GetGitRepo4DirFn, RepoActionRelease, RepoActions, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_fixture_name", [ pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive) for repo_fixture_name in [ repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits.__name__, repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format.__name__, repo_w_git_flow_w_rc_n_alpha_prereleases_n_emoji_commits.__name__, repo_w_git_flow_w_rc_n_alpha_prereleases_n_scipy_commits.__name__, ] ], ) def test_gitflow_repo_rebuild_3_channels( repo_fixture_name: str, run_psr_release: RunPSReleaseFn, build_git_flow_repo_w_3_release_channels: BuildSpecificRepoFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn, example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, build_repo_from_definition: BuildRepoFromDefinitionFn, mocked_git_push: MagicMock, post_mocker: Mocker, version_py_file: Path, get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn, get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn, pyproject_toml_file: Path, changelog_md_file: Path, changelog_rst_file: Path, ): # build target repo into a temporary directory target_repo_dir = example_project_dir / repo_fixture_name commit_type: CommitConvention = ( repo_fixture_name.split("commits", 1)[0].split("_")[-2] # type: ignore[assignment] ) target_repo_definition = build_git_flow_repo_w_3_release_channels( repo_name=repo_fixture_name, commit_type=commit_type, dest_dir=target_repo_dir, ) 
target_git_repo = git_repo_for_directory(target_repo_dir) # split repo actions by release actions organized_steps = split_repo_actions_by_release_tags(target_repo_definition) configuration_steps = organized_steps.pop(None) unreleased_steps = organized_steps.pop("Unreleased", None) if unreleased_steps: raise ValueError("Unreleased steps found. Not Supported yet!") release_tags_2_steps = cast("dict[Version, list[RepoActions]]", organized_steps) # Create the mirror repo directory mirror_repo_dir = init_mirror_repo_for_rebuild( mirror_repo_dir=(example_project_dir / "mirror"), configuration_steps=configuration_steps, # type: ignore[arg-type] files_to_remove=[ changelog_md_file, changelog_rst_file, ], ) mirror_git_repo = git_repo_for_directory(mirror_repo_dir) # rebuild repo from scratch stopping before each release tag for curr_version, steps in release_tags_2_steps.items(): curr_release_tag = curr_version.as_tag() # make sure mocks are clear mocked_git_push.reset_mock() post_mocker.reset_mock() # Extract expected result from target repo target_git_repo.git.checkout(curr_release_tag, detach=True) expected_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir ) expected_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=target_repo_dir ) expected_pyproject_toml_content = ( target_repo_dir / pyproject_toml_file ).read_text() expected_version_file_content = (target_repo_dir / version_py_file).read_text() expected_release_commit_text = target_git_repo.head.commit.message # In our repo env, start building the repo from the definition build_repo_from_definition( dest_dir=mirror_repo_dir, repo_construction_steps=steps[:-1], # stop before the release step ) release_action_step: RepoActionRelease = steps[-1] # type: ignore[assignment] # Act: run PSR on the repo instead of the RELEASE step with freeze_time( release_action_step["details"]["datetime"] ), temporary_working_directory(mirror_repo_dir): run_psr_release( 
next_version_str=release_action_step["details"]["version"], git_repo=mirror_git_repo, ) # take measurement after running the version command actual_release_commit_text = mirror_git_repo.head.commit.message actual_pyproject_toml_content = ( mirror_repo_dir / pyproject_toml_file ).read_text() actual_version_file_content = (mirror_repo_dir / version_py_file).read_text() actual_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir ) actual_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir ) # Evaluate (normal release actions should have occurred as expected) # ------------------------------------------------------------------ # Make sure version file is updated assert expected_pyproject_toml_content == actual_pyproject_toml_content assert expected_version_file_content == actual_version_file_content # Make sure changelog is updated assert expected_md_changelog_content == actual_md_changelog_content assert expected_rst_changelog_content == actual_rst_changelog_content # Make sure commit is created assert expected_release_commit_text == actual_release_commit_text # Make sure tag is created assert curr_release_tag in [tag.name for tag in mirror_git_repo.tags] assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/git_flow/test_repo_4_channels.py000066400000000000000000000151501506116242600334650ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING, cast import pytest from freezegun import freeze_time from tests.fixtures.repos.git_flow import ( repo_w_git_flow_w_beta_alpha_rev_prereleases_n_conventional_commits, repo_w_git_flow_w_beta_alpha_rev_prereleases_n_emoji_commits, repo_w_git_flow_w_beta_alpha_rev_prereleases_n_scipy_commits, ) from tests.util import temporary_working_directory if TYPE_CHECKING: from pathlib import 
Path from unittest.mock import MagicMock from requests_mock import Mocker from semantic_release.version.version import Version from tests.e2e.cmd_version.bump_version.conftest import ( InitMirrorRepo4RebuildFn, RunPSReleaseFn, ) from tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildSpecificRepoFn, CommitConvention, GetGitRepo4DirFn, RepoActionRelease, RepoActions, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_fixture_name", [ pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive) for repo_fixture_name in [ repo_w_git_flow_w_beta_alpha_rev_prereleases_n_conventional_commits.__name__, repo_w_git_flow_w_beta_alpha_rev_prereleases_n_emoji_commits.__name__, repo_w_git_flow_w_beta_alpha_rev_prereleases_n_scipy_commits.__name__, ] ], ) def test_gitflow_repo_rebuild_4_channels( repo_fixture_name: str, run_psr_release: RunPSReleaseFn, build_git_flow_repo_w_4_release_channels: BuildSpecificRepoFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn, example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, build_repo_from_definition: BuildRepoFromDefinitionFn, mocked_git_push: MagicMock, post_mocker: Mocker, version_py_file: Path, get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn, get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn, pyproject_toml_file: Path, changelog_md_file: Path, changelog_rst_file: Path, ): # build target repo into a temporary directory target_repo_dir = example_project_dir / repo_fixture_name commit_type: CommitConvention = ( repo_fixture_name.split("commits", 1)[0].split("_")[-2] # type: ignore[assignment] ) target_repo_definition = build_git_flow_repo_w_4_release_channels( repo_name=repo_fixture_name, commit_type=commit_type, dest_dir=target_repo_dir, ) 
target_git_repo = git_repo_for_directory(target_repo_dir) # split repo actions by release actions organized_steps = split_repo_actions_by_release_tags(target_repo_definition) configuration_steps = organized_steps.pop(None) unreleased_steps = organized_steps.pop("Unreleased", None) if unreleased_steps: raise ValueError("Unreleased steps found. Not Supported yet!") release_tags_2_steps = cast("dict[Version, list[RepoActions]]", organized_steps) # Create the mirror repo directory mirror_repo_dir = init_mirror_repo_for_rebuild( mirror_repo_dir=(example_project_dir / "mirror"), configuration_steps=configuration_steps, # type: ignore[arg-type] files_to_remove=[ changelog_md_file, changelog_rst_file, ], ) mirror_git_repo = git_repo_for_directory(mirror_repo_dir) # rebuild repo from scratch stopping before each release tag for curr_version, steps in release_tags_2_steps.items(): curr_release_tag = curr_version.as_tag() # make sure mocks are clear mocked_git_push.reset_mock() post_mocker.reset_mock() # Extract expected result from target repo target_git_repo.git.checkout(curr_release_tag, detach=True) expected_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir ) expected_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=target_repo_dir ) expected_pyproject_toml_content = ( target_repo_dir / pyproject_toml_file ).read_text() expected_version_file_content = (target_repo_dir / version_py_file).read_text() expected_release_commit_text = target_git_repo.head.commit.message # In our repo env, start building the repo from the definition build_repo_from_definition( dest_dir=mirror_repo_dir, repo_construction_steps=steps[:-1], # stop before the release step ) release_action_step: RepoActionRelease = steps[-1] # type: ignore[assignment] # Act: run PSR on the repo instead of the RELEASE step with freeze_time( release_action_step["details"]["datetime"] ), temporary_working_directory(mirror_repo_dir): run_psr_release( 
next_version_str=release_action_step["details"]["version"], git_repo=mirror_git_repo, ) # take measurement after running the version command actual_release_commit_text = mirror_git_repo.head.commit.message actual_pyproject_toml_content = ( mirror_repo_dir / pyproject_toml_file ).read_text() actual_version_file_content = (mirror_repo_dir / version_py_file).read_text() actual_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir ) actual_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir ) # Evaluate (normal release actions should have occurred as expected) # ------------------------------------------------------------------ # Make sure version file is updated assert expected_pyproject_toml_content == actual_pyproject_toml_content assert expected_version_file_content == actual_version_file_content # Make sure changelog is updated assert expected_md_changelog_content == actual_md_changelog_content assert expected_rst_changelog_content == actual_rst_changelog_content # Make sure commit is created assert expected_release_commit_text == actual_release_commit_text # Make sure tag is created assert curr_release_tag in [tag.name for tag in mirror_git_repo.tags] assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/github_flow/000077500000000000000000000000001506116242600275065ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/github_flow/__init__.py000066400000000000000000000000001506116242600316050ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/github_flow/test_repo_1_channel.py000066400000000000000000000151651506116242600340040ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING, cast import pytest from freezegun import freeze_time 
from tests.fixtures.repos.github_flow import ( repo_w_github_flow_w_default_release_channel_conventional_commits, repo_w_github_flow_w_default_release_channel_emoji_commits, repo_w_github_flow_w_default_release_channel_scipy_commits, ) from tests.util import temporary_working_directory if TYPE_CHECKING: from pathlib import Path from unittest.mock import MagicMock from requests_mock import Mocker from semantic_release.version.version import Version from tests.e2e.cmd_version.bump_version.conftest import ( InitMirrorRepo4RebuildFn, RunPSReleaseFn, ) from tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildSpecificRepoFn, CommitConvention, GetGitRepo4DirFn, RepoActionRelease, RepoActions, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_fixture_name", [ pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive) for repo_fixture_name in [ repo_w_github_flow_w_default_release_channel_conventional_commits.__name__, repo_w_github_flow_w_default_release_channel_emoji_commits.__name__, repo_w_github_flow_w_default_release_channel_scipy_commits.__name__, ] ], ) def test_githubflow_repo_rebuild_1_channel( repo_fixture_name: str, run_psr_release: RunPSReleaseFn, build_repo_w_github_flow_w_default_release_channel: BuildSpecificRepoFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn, example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, build_repo_from_definition: BuildRepoFromDefinitionFn, mocked_git_push: MagicMock, post_mocker: Mocker, version_py_file: Path, get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn, get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn, pyproject_toml_file: Path, changelog_md_file: Path, changelog_rst_file: Path, ): # build target repo into a temporary directory 
target_repo_dir = example_project_dir / repo_fixture_name commit_type: CommitConvention = ( repo_fixture_name.split("commits", 1)[0].split("_")[-2] # type: ignore[assignment] ) target_repo_definition = build_repo_w_github_flow_w_default_release_channel( repo_name=repo_fixture_name, commit_type=commit_type, dest_dir=target_repo_dir, ) target_git_repo = git_repo_for_directory(target_repo_dir) # split repo actions by release actions organized_steps = split_repo_actions_by_release_tags(target_repo_definition) configuration_steps = organized_steps.pop(None) unreleased_steps = organized_steps.pop("Unreleased", None) if unreleased_steps: raise ValueError("Unreleased steps found. Not Supported yet!") release_tags_2_steps = cast("dict[Version, list[RepoActions]]", organized_steps) # Create the mirror repo directory mirror_repo_dir = init_mirror_repo_for_rebuild( mirror_repo_dir=(example_project_dir / "mirror"), configuration_steps=configuration_steps, # type: ignore[arg-type] files_to_remove=[ changelog_md_file, changelog_rst_file, ], ) mirror_git_repo = git_repo_for_directory(mirror_repo_dir) # rebuild repo from scratch stopping before each release tag for curr_version, steps in release_tags_2_steps.items(): curr_release_tag = curr_version.as_tag() # make sure mocks are clear mocked_git_push.reset_mock() post_mocker.reset_mock() # Extract expected result from target repo target_git_repo.git.checkout(curr_release_tag, detach=True) expected_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir ) expected_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=target_repo_dir ) expected_pyproject_toml_content = ( target_repo_dir / pyproject_toml_file ).read_text() expected_version_file_content = (target_repo_dir / version_py_file).read_text() expected_release_commit_text = target_git_repo.head.commit.message # In our repo env, start building the repo from the definition build_repo_from_definition( dest_dir=mirror_repo_dir, 
repo_construction_steps=steps[:-1], # stop before the release step ) release_action_step: RepoActionRelease = steps[-1] # type: ignore[assignment] # Act: run PSR on the repo instead of the RELEASE step with freeze_time( release_action_step["details"]["datetime"] ), temporary_working_directory(mirror_repo_dir): run_psr_release( next_version_str=release_action_step["details"]["version"], git_repo=mirror_git_repo, ) # take measurement after running the version command actual_release_commit_text = mirror_git_repo.head.commit.message actual_pyproject_toml_content = ( mirror_repo_dir / pyproject_toml_file ).read_text() actual_version_file_content = (mirror_repo_dir / version_py_file).read_text() actual_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir ) actual_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir ) # Evaluate (normal release actions should have occurred as expected) # ------------------------------------------------------------------ # Make sure version file is updated assert expected_pyproject_toml_content == actual_pyproject_toml_content assert expected_version_file_content == actual_version_file_content # Make sure changelog is updated assert expected_md_changelog_content == actual_md_changelog_content assert expected_rst_changelog_content == actual_rst_changelog_content # Make sure commit is created assert expected_release_commit_text == actual_release_commit_text # Make sure tag is created assert curr_release_tag in [tag.name for tag in mirror_git_repo.tags] assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred test_repo_1_channel_branch_update_merge.py000066400000000000000000000162031506116242600377550ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/github_flowfrom __future__ import annotations from typing import TYPE_CHECKING, cast import pytest from freezegun 
import freeze_time from tests.const import ( DEFAULT_BRANCH_NAME, ) from tests.fixtures.repos.github_flow import ( repo_w_github_flow_w_default_release_n_branch_update_merge_conventional_commits, repo_w_github_flow_w_default_release_n_branch_update_merge_emoji_commits, repo_w_github_flow_w_default_release_n_branch_update_merge_scipy_commits, ) from tests.util import temporary_working_directory if TYPE_CHECKING: from pathlib import Path from unittest.mock import MagicMock from requests_mock import Mocker from semantic_release.version.version import Version from tests.e2e.cmd_version.bump_version.conftest import ( InitMirrorRepo4RebuildFn, RunPSReleaseFn, ) from tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildSpecificRepoFn, CommitConvention, GetGitRepo4DirFn, RepoActionRelease, RepoActions, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.xfail( reason="Should pass after [#1252](https://github.com/python-semantic-release/python-semantic-release/issues/1252) is fixed", ) @pytest.mark.parametrize( "repo_fixture_name", [ pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive) for repo_fixture_name in [ repo_w_github_flow_w_default_release_n_branch_update_merge_conventional_commits.__name__, repo_w_github_flow_w_default_release_n_branch_update_merge_emoji_commits.__name__, repo_w_github_flow_w_default_release_n_branch_update_merge_scipy_commits.__name__, ] ], ) def test_github_flow_repo_w_default_release_n_branch_update_merge( repo_fixture_name: str, run_psr_release: RunPSReleaseFn, build_github_flow_repo_w_default_release_n_branch_update_merge: BuildSpecificRepoFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn, example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, build_repo_from_definition: BuildRepoFromDefinitionFn, 
mocked_git_push: MagicMock, post_mocker: Mocker, version_py_file: Path, get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn, get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn, pyproject_toml_file: Path, changelog_md_file: Path, changelog_rst_file: Path, ): # build target repo into a temporary directory target_repo_dir = example_project_dir / repo_fixture_name commit_type: CommitConvention = ( repo_fixture_name.split("commits", 1)[0].split("_")[-2] # type: ignore[assignment] ) target_repo_definition = ( build_github_flow_repo_w_default_release_n_branch_update_merge( repo_name=repo_fixture_name, commit_type=commit_type, dest_dir=target_repo_dir, ) ) target_git_repo = git_repo_for_directory(target_repo_dir) # split repo actions by release actions organized_steps = split_repo_actions_by_release_tags(target_repo_definition) configuration_steps = organized_steps.pop(None) unreleased_steps = organized_steps.pop("Unreleased", None) if unreleased_steps: raise ValueError("Unreleased steps found. 
Not Supported yet!") release_tags_2_steps = cast("dict[Version, list[RepoActions]]", organized_steps) # Create the mirror repo directory mirror_repo_dir = init_mirror_repo_for_rebuild( mirror_repo_dir=(example_project_dir / "mirror"), configuration_steps=configuration_steps, # type: ignore[arg-type] files_to_remove=[ changelog_md_file, changelog_rst_file, ], ) mirror_git_repo = git_repo_for_directory(mirror_repo_dir) # rebuild repo from scratch stopping before each release tag for curr_version, steps in release_tags_2_steps.items(): curr_release_tag = curr_version.as_tag() # make sure mocks are clear mocked_git_push.reset_mock() post_mocker.reset_mock() # Extract expected result from target repo head_reference_name = ( curr_release_tag if curr_release_tag != "Unreleased" else DEFAULT_BRANCH_NAME ) target_git_repo.git.checkout(head_reference_name, detach=True) expected_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir ) expected_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=target_repo_dir ) expected_pyproject_toml_content = ( target_repo_dir / pyproject_toml_file ).read_text() expected_version_file_content = (target_repo_dir / version_py_file).read_text() expected_release_commit_text = target_git_repo.head.commit.message # In our repo env, start building the repo from the definition build_repo_from_definition( dest_dir=mirror_repo_dir, repo_construction_steps=steps[:-1], # stop before the release step ) release_action_step: RepoActionRelease = steps[-1] # type: ignore[assignment] # Act: run PSR on the repo instead of the RELEASE step with freeze_time( release_action_step["details"]["datetime"] ), temporary_working_directory(mirror_repo_dir): run_psr_release( next_version_str=release_action_step["details"]["version"], git_repo=mirror_git_repo, ) # take measurement after running the version command actual_release_commit_text = mirror_git_repo.head.commit.message actual_pyproject_toml_content = ( 
mirror_repo_dir / pyproject_toml_file ).read_text() actual_version_file_content = (mirror_repo_dir / version_py_file).read_text() actual_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir ) actual_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir ) # Evaluate (normal release actions should have occurred as expected) # ------------------------------------------------------------------ # Make sure version file is updated assert expected_pyproject_toml_content == actual_pyproject_toml_content assert expected_version_file_content == actual_version_file_content # Make sure changelog is updated assert expected_md_changelog_content == actual_md_changelog_content assert expected_rst_changelog_content == actual_rst_changelog_content # Make sure commit is created assert expected_release_commit_text == actual_release_commit_text # Make sure tag is created assert curr_release_tag in [tag.name for tag in mirror_git_repo.tags] assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred test_repo_2_channels.py000066400000000000000000000152221506116242600341030ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/github_flowfrom __future__ import annotations from typing import TYPE_CHECKING, cast import pytest from freezegun import freeze_time from tests.fixtures.repos.github_flow import ( repo_w_github_flow_w_feature_release_channel_conventional_commits, repo_w_github_flow_w_feature_release_channel_emoji_commits, repo_w_github_flow_w_feature_release_channel_scipy_commits, ) from tests.util import temporary_working_directory if TYPE_CHECKING: from pathlib import Path from unittest.mock import MagicMock from requests_mock import Mocker from semantic_release.version.version import Version from tests.e2e.cmd_version.bump_version.conftest import ( InitMirrorRepo4RebuildFn, RunPSReleaseFn, ) from 
tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildSpecificRepoFn, CommitConvention, GetGitRepo4DirFn, RepoActionRelease, RepoActions, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_fixture_name", [ pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive) for repo_fixture_name in [ repo_w_github_flow_w_feature_release_channel_conventional_commits.__name__, repo_w_github_flow_w_feature_release_channel_emoji_commits.__name__, repo_w_github_flow_w_feature_release_channel_scipy_commits.__name__, ] ], ) def test_githubflow_repo_rebuild_2_channels( repo_fixture_name: str, run_psr_release: RunPSReleaseFn, build_repo_w_github_flow_w_feature_release_channel: BuildSpecificRepoFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn, example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, build_repo_from_definition: BuildRepoFromDefinitionFn, mocked_git_push: MagicMock, post_mocker: Mocker, version_py_file: Path, get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn, get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn, pyproject_toml_file: Path, changelog_md_file: Path, changelog_rst_file: Path, ): # build target repo into a temporary directory target_repo_dir = example_project_dir / repo_fixture_name commit_type: CommitConvention = ( repo_fixture_name.split("commits", 1)[0].split("_")[-2] # type: ignore[assignment] # type: ignore[assignment] ) target_repo_definition = build_repo_w_github_flow_w_feature_release_channel( repo_name=repo_fixture_name, commit_type=commit_type, dest_dir=target_repo_dir, ) target_git_repo = git_repo_for_directory(target_repo_dir) # split repo actions by release actions organized_steps = split_repo_actions_by_release_tags(target_repo_definition) configuration_steps = 
organized_steps.pop(None) unreleased_steps = organized_steps.pop("Unreleased", None) if unreleased_steps: raise ValueError("Unreleased steps found. Not Supported yet!") release_tags_2_steps = cast("dict[Version, list[RepoActions]]", organized_steps) # Create the mirror repo directory mirror_repo_dir = init_mirror_repo_for_rebuild( mirror_repo_dir=(example_project_dir / "mirror"), configuration_steps=configuration_steps, # type: ignore[arg-type] files_to_remove=[ changelog_md_file, changelog_rst_file, ], ) mirror_git_repo = git_repo_for_directory(mirror_repo_dir) # rebuild repo from scratch stopping before each release tag for curr_version, steps in release_tags_2_steps.items(): curr_release_tag = curr_version.as_tag() # make sure mocks are clear mocked_git_push.reset_mock() post_mocker.reset_mock() # Extract expected result from target repo target_git_repo.git.checkout(curr_release_tag, detach=True) expected_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir ) expected_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=target_repo_dir ) expected_pyproject_toml_content = ( target_repo_dir / pyproject_toml_file ).read_text() expected_version_file_content = (target_repo_dir / version_py_file).read_text() expected_release_commit_text = target_git_repo.head.commit.message # In our repo env, start building the repo from the definition build_repo_from_definition( dest_dir=mirror_repo_dir, repo_construction_steps=steps[:-1], # stop before the release step ) release_action_step: RepoActionRelease = steps[-1] # type: ignore[assignment] # Act: run PSR on the repo instead of the RELEASE step with freeze_time( release_action_step["details"]["datetime"] ), temporary_working_directory(mirror_repo_dir): run_psr_release( next_version_str=release_action_step["details"]["version"], git_repo=mirror_git_repo, ) # take measurement after running the version command actual_release_commit_text = mirror_git_repo.head.commit.message 
actual_pyproject_toml_content = ( mirror_repo_dir / pyproject_toml_file ).read_text() actual_version_file_content = (mirror_repo_dir / version_py_file).read_text() actual_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir ) actual_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir ) # Evaluate (normal release actions should have occurred as expected) # ------------------------------------------------------------------ # Make sure version file is updated assert expected_pyproject_toml_content == actual_pyproject_toml_content assert expected_version_file_content == actual_version_file_content # Make sure changelog is updated assert expected_md_changelog_content == actual_md_changelog_content assert expected_rst_changelog_content == actual_rst_changelog_content # Make sure commit is created assert expected_release_commit_text == actual_release_commit_text # Make sure tag is created assert curr_release_tag in [tag.name for tag in mirror_git_repo.tags] assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/github_flow_monorepo/000077500000000000000000000000001506116242600314245ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/github_flow_monorepo/__init__.py000066400000000000000000000000001506116242600335230ustar00rootroot00000000000000test_monorepo_1_channel.py000066400000000000000000000237301506116242600365310ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/github_flow_monorepofrom __future__ import annotations from pathlib import Path from typing import TYPE_CHECKING, cast import pytest from freezegun import freeze_time from semantic_release.version.version import Version from tests.const import RepoActionStep from tests.fixtures.monorepos.github_flow import ( 
monorepo_w_github_flow_w_default_release_channel_conventional_commits, ) from tests.util import temporary_working_directory if TYPE_CHECKING: from typing import Literal, Sequence from unittest.mock import MagicMock from requests_mock import Mocker from tests.e2e.cmd_version.bump_version.conftest import ( InitMirrorRepo4RebuildFn, RunPSReleaseFn, ) from tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildSpecificRepoFn, CommitConvention, GetGitRepo4DirFn, RepoActionConfigure, RepoActionConfigureMonorepo, RepoActionCreateMonorepo, RepoActionRelease, RepoActions, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_fixture_name", [ pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive) for repo_fixture_name in [ monorepo_w_github_flow_w_default_release_channel_conventional_commits.__name__, ] ], ) def test_githubflow_monorepo_rebuild_1_channel( repo_fixture_name: str, run_psr_release: RunPSReleaseFn, build_monorepo_w_github_flow_w_default_release_channel: BuildSpecificRepoFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn, example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, build_repo_from_definition: BuildRepoFromDefinitionFn, mocked_git_push: MagicMock, post_mocker: Mocker, get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn, get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn, monorepo_pkg1_pyproject_toml_file: Path, monorepo_pkg2_pyproject_toml_file: Path, monorepo_pkg1_version_py_file: Path, monorepo_pkg2_version_py_file: Path, monorepo_pkg1_changelog_md_file: Path, monorepo_pkg2_changelog_md_file: Path, monorepo_pkg1_changelog_rst_file: Path, monorepo_pkg2_changelog_rst_file: Path, ): # build target repo into a temporary directory target_repo_dir = example_project_dir / 
repo_fixture_name commit_type = cast( "CommitConvention", repo_fixture_name.split("commits", 1)[0].split("_")[-2] ) target_repo_definition = build_monorepo_w_github_flow_w_default_release_channel( repo_name=repo_fixture_name, commit_type=commit_type, dest_dir=target_repo_dir, ) target_git_repo = git_repo_for_directory(target_repo_dir) # split repo actions by release actions release_tags_2_steps = split_repo_actions_by_release_tags(target_repo_definition) configuration_steps = cast( "Sequence[RepoActionConfigure | RepoActionCreateMonorepo | RepoActionConfigureMonorepo]", release_tags_2_steps.pop(None), ) release_versions_2_steps = cast( "dict[Version | Literal['Unreleased'], list[RepoActions]]", release_tags_2_steps, ) # Create the mirror repo directory mirror_repo_dir = init_mirror_repo_for_rebuild( mirror_repo_dir=(example_project_dir / "mirror"), configuration_steps=configuration_steps, files_to_remove=[], ) mirror_git_repo = git_repo_for_directory(mirror_repo_dir) # rebuild repo from scratch stopping before each release tag for curr_release_key, steps in release_versions_2_steps.items(): curr_release_str = ( curr_release_key.as_tag() if isinstance(curr_release_key, Version) else curr_release_key ) # make sure mocks are clear mocked_git_push.reset_mock() post_mocker.reset_mock() # Extract expected result from target repo if curr_release_str != "Unreleased": target_git_repo.git.checkout(curr_release_str, detach=True, force=True) expected_pkg1_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir, changelog_file=monorepo_pkg1_changelog_md_file ) expected_pkg2_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir, changelog_file=monorepo_pkg2_changelog_md_file ) expected_pkg1_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=target_repo_dir, changelog_file=monorepo_pkg1_changelog_rst_file ) expected_pkg2_rst_changelog_content = get_sanitized_rst_changelog_content( 
repo_dir=target_repo_dir, changelog_file=monorepo_pkg2_changelog_rst_file ) expected_pkg1_pyproject_toml_content = ( target_repo_dir / monorepo_pkg1_pyproject_toml_file ).read_text() expected_pkg2_pyproject_toml_content = ( target_repo_dir / monorepo_pkg2_pyproject_toml_file ).read_text() expected_pkg1_version_file_content = ( target_repo_dir / monorepo_pkg1_version_py_file ).read_text() expected_pkg2_version_file_content = ( target_repo_dir / monorepo_pkg2_version_py_file ).read_text() expected_release_commit_text = target_git_repo.head.commit.message # In our repo env, start building the repo from the definition build_repo_from_definition( dest_dir=mirror_repo_dir, # stop before the release step repo_construction_steps=steps[ : -1 if curr_release_str != "Unreleased" else None ], ) release_directory = mirror_repo_dir for step in steps[::-1]: # reverse order if step["action"] == RepoActionStep.CHANGE_DIRECTORY: release_directory = ( mirror_repo_dir if str(Path(step["details"]["directory"])) == str(mirror_repo_dir.root) else Path(step["details"]["directory"]) ) release_directory = ( mirror_repo_dir / release_directory if not release_directory.is_absolute() else release_directory ) if mirror_repo_dir not in release_directory.parents: release_directory = mirror_repo_dir break # Act: run PSR on the repo instead of the RELEASE step if curr_release_str != "Unreleased": release_action_step = cast("RepoActionRelease", steps[-1]) with freeze_time( release_action_step["details"]["datetime"] ), temporary_working_directory(release_directory): run_psr_release( next_version_str=release_action_step["details"]["version"], git_repo=mirror_git_repo, config_toml_path=Path("pyproject.toml"), ) else: # run psr changelog command to validate changelog pass # take measurement after running the version command actual_release_commit_text = mirror_git_repo.head.commit.message actual_pkg1_pyproject_toml_content = ( mirror_repo_dir / monorepo_pkg1_pyproject_toml_file ).read_text() 
actual_pkg2_pyproject_toml_content = ( mirror_repo_dir / monorepo_pkg2_pyproject_toml_file ).read_text() actual_pkg1_version_file_content = ( mirror_repo_dir / monorepo_pkg1_version_py_file ).read_text() actual_pkg2_version_file_content = ( mirror_repo_dir / monorepo_pkg2_version_py_file ).read_text() actual_pkg1_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir, changelog_file=monorepo_pkg1_changelog_md_file ) actual_pkg2_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir, changelog_file=monorepo_pkg2_changelog_md_file ) actual_pkg1_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir, changelog_file=monorepo_pkg1_changelog_rst_file ) actual_pkg2_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir, changelog_file=monorepo_pkg2_changelog_rst_file ) # Evaluate (normal release actions should have occurred as expected) # ------------------------------------------------------------------ # Make sure version file is updated assert ( expected_pkg1_pyproject_toml_content == actual_pkg1_pyproject_toml_content ) assert ( expected_pkg2_pyproject_toml_content == actual_pkg2_pyproject_toml_content ) assert expected_pkg1_version_file_content == actual_pkg1_version_file_content assert expected_pkg2_version_file_content == actual_pkg2_version_file_content # Make sure changelog is updated assert expected_pkg1_md_changelog_content == actual_pkg1_md_changelog_content assert expected_pkg2_md_changelog_content == actual_pkg2_md_changelog_content assert expected_pkg1_rst_changelog_content == actual_pkg1_rst_changelog_content assert expected_pkg2_rst_changelog_content == actual_pkg2_rst_changelog_content # Make sure commit is created assert expected_release_commit_text == actual_release_commit_text if curr_release_str != "Unreleased": # Make sure tag is created assert curr_release_str in [tag.name for tag in mirror_git_repo.tags] # Make sure publishing 
actions occurred assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred test_monorepo_2_channels.py000066400000000000000000000237301506116242600367150ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/github_flow_monorepofrom __future__ import annotations from pathlib import Path from typing import TYPE_CHECKING, cast import pytest from freezegun import freeze_time from semantic_release.version.version import Version from tests.const import RepoActionStep from tests.fixtures.monorepos.github_flow import ( monorepo_w_github_flow_w_feature_release_channel_conventional_commits, ) from tests.util import temporary_working_directory if TYPE_CHECKING: from typing import Literal, Sequence from unittest.mock import MagicMock from requests_mock import Mocker from tests.e2e.cmd_version.bump_version.conftest import ( InitMirrorRepo4RebuildFn, RunPSReleaseFn, ) from tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildSpecificRepoFn, CommitConvention, GetGitRepo4DirFn, RepoActionConfigure, RepoActionConfigureMonorepo, RepoActionCreateMonorepo, RepoActionRelease, RepoActions, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_fixture_name", [ pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive) for repo_fixture_name in [ monorepo_w_github_flow_w_feature_release_channel_conventional_commits.__name__, ] ], ) def test_githubflow_monorepo_rebuild_2_channels( repo_fixture_name: str, run_psr_release: RunPSReleaseFn, build_monorepo_w_github_flow_w_feature_release_channel: BuildSpecificRepoFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn, example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, 
build_repo_from_definition: BuildRepoFromDefinitionFn, mocked_git_push: MagicMock, post_mocker: Mocker, get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn, get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn, monorepo_pkg1_pyproject_toml_file: Path, monorepo_pkg2_pyproject_toml_file: Path, monorepo_pkg1_version_py_file: Path, monorepo_pkg2_version_py_file: Path, monorepo_pkg1_changelog_md_file: Path, monorepo_pkg2_changelog_md_file: Path, monorepo_pkg1_changelog_rst_file: Path, monorepo_pkg2_changelog_rst_file: Path, ): # build target repo into a temporary directory target_repo_dir = example_project_dir / repo_fixture_name commit_type = cast( "CommitConvention", repo_fixture_name.split("commits", 1)[0].split("_")[-2] ) target_repo_definition = build_monorepo_w_github_flow_w_feature_release_channel( repo_name=repo_fixture_name, commit_type=commit_type, dest_dir=target_repo_dir, ) target_git_repo = git_repo_for_directory(target_repo_dir) # split repo actions by release actions release_tags_2_steps = split_repo_actions_by_release_tags(target_repo_definition) configuration_steps = cast( "Sequence[RepoActionConfigure | RepoActionCreateMonorepo | RepoActionConfigureMonorepo]", release_tags_2_steps.pop(None), ) release_versions_2_steps = cast( "dict[Version | Literal['Unreleased'], list[RepoActions]]", release_tags_2_steps, ) # Create the mirror repo directory mirror_repo_dir = init_mirror_repo_for_rebuild( mirror_repo_dir=(example_project_dir / "mirror"), configuration_steps=configuration_steps, files_to_remove=[], ) mirror_git_repo = git_repo_for_directory(mirror_repo_dir) # rebuild repo from scratch stopping before each release tag for curr_release_key, steps in release_versions_2_steps.items(): curr_release_str = ( curr_release_key.as_tag() if isinstance(curr_release_key, Version) else curr_release_key ) # make sure mocks are clear mocked_git_push.reset_mock() post_mocker.reset_mock() # Extract expected result from target repo if 
curr_release_str != "Unreleased": target_git_repo.git.checkout(curr_release_str, detach=True, force=True) expected_pkg1_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir, changelog_file=monorepo_pkg1_changelog_md_file ) expected_pkg2_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir, changelog_file=monorepo_pkg2_changelog_md_file ) expected_pkg1_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=target_repo_dir, changelog_file=monorepo_pkg1_changelog_rst_file ) expected_pkg2_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=target_repo_dir, changelog_file=monorepo_pkg2_changelog_rst_file ) expected_pkg1_pyproject_toml_content = ( target_repo_dir / monorepo_pkg1_pyproject_toml_file ).read_text() expected_pkg2_pyproject_toml_content = ( target_repo_dir / monorepo_pkg2_pyproject_toml_file ).read_text() expected_pkg1_version_file_content = ( target_repo_dir / monorepo_pkg1_version_py_file ).read_text() expected_pkg2_version_file_content = ( target_repo_dir / monorepo_pkg2_version_py_file ).read_text() expected_release_commit_text = target_git_repo.head.commit.message # In our repo env, start building the repo from the definition build_repo_from_definition( dest_dir=mirror_repo_dir, # stop before the release step repo_construction_steps=steps[ : -1 if curr_release_str != "Unreleased" else None ], ) release_directory = mirror_repo_dir for step in steps[::-1]: # reverse order if step["action"] == RepoActionStep.CHANGE_DIRECTORY: release_directory = ( mirror_repo_dir if str(Path(step["details"]["directory"])) == str(mirror_repo_dir.root) else Path(step["details"]["directory"]) ) release_directory = ( mirror_repo_dir / release_directory if not release_directory.is_absolute() else release_directory ) if mirror_repo_dir not in release_directory.parents: release_directory = mirror_repo_dir break # Act: run PSR on the repo instead of the RELEASE step if curr_release_str != 
"Unreleased": release_action_step = cast("RepoActionRelease", steps[-1]) with freeze_time( release_action_step["details"]["datetime"] ), temporary_working_directory(release_directory): run_psr_release( next_version_str=release_action_step["details"]["version"], git_repo=mirror_git_repo, config_toml_path=Path("pyproject.toml"), ) else: # run psr changelog command to validate changelog pass # take measurement after running the version command actual_release_commit_text = mirror_git_repo.head.commit.message actual_pkg1_pyproject_toml_content = ( mirror_repo_dir / monorepo_pkg1_pyproject_toml_file ).read_text() actual_pkg2_pyproject_toml_content = ( mirror_repo_dir / monorepo_pkg2_pyproject_toml_file ).read_text() actual_pkg1_version_file_content = ( mirror_repo_dir / monorepo_pkg1_version_py_file ).read_text() actual_pkg2_version_file_content = ( mirror_repo_dir / monorepo_pkg2_version_py_file ).read_text() actual_pkg1_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir, changelog_file=monorepo_pkg1_changelog_md_file ) actual_pkg2_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir, changelog_file=monorepo_pkg2_changelog_md_file ) actual_pkg1_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir, changelog_file=monorepo_pkg1_changelog_rst_file ) actual_pkg2_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir, changelog_file=monorepo_pkg2_changelog_rst_file ) # Evaluate (normal release actions should have occurred as expected) # ------------------------------------------------------------------ # Make sure version file is updated assert ( expected_pkg1_pyproject_toml_content == actual_pkg1_pyproject_toml_content ) assert ( expected_pkg2_pyproject_toml_content == actual_pkg2_pyproject_toml_content ) assert expected_pkg1_version_file_content == actual_pkg1_version_file_content assert expected_pkg2_version_file_content == 
actual_pkg2_version_file_content # Make sure changelog is updated assert expected_pkg1_md_changelog_content == actual_pkg1_md_changelog_content assert expected_pkg2_md_changelog_content == actual_pkg2_md_changelog_content assert expected_pkg1_rst_changelog_content == actual_pkg1_rst_changelog_content assert expected_pkg2_rst_changelog_content == actual_pkg2_rst_changelog_content # Make sure commit is created assert expected_release_commit_text == actual_release_commit_text if curr_release_str != "Unreleased": # Make sure tag is created assert curr_release_str in [tag.name for tag in mirror_git_repo.tags] # Make sure publishing actions occurred assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/trunk_based_dev/000077500000000000000000000000001506116242600303345ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/trunk_based_dev/__init__.py000066400000000000000000000000001506116242600324330ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/trunk_based_dev/test_repo_trunk.py000066400000000000000000000147311506116242600341430ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING, cast import pytest from freezegun import freeze_time from tests.fixtures.repos.trunk_based_dev import ( repo_w_trunk_only_conventional_commits, repo_w_trunk_only_emoji_commits, repo_w_trunk_only_scipy_commits, ) from tests.util import temporary_working_directory if TYPE_CHECKING: from pathlib import Path from unittest.mock import MagicMock from requests_mock import Mocker from semantic_release.version.version import Version from tests.e2e.cmd_version.bump_version.conftest import ( InitMirrorRepo4RebuildFn, RunPSReleaseFn, ) from tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project 
import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildSpecificRepoFn, CommitConvention, GetGitRepo4DirFn, RepoActionRelease, RepoActions, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_fixture_name", [ repo_w_trunk_only_conventional_commits.__name__, *[ pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive) for repo_fixture_name in [ repo_w_trunk_only_emoji_commits.__name__, repo_w_trunk_only_scipy_commits.__name__, ] ], ], ) def test_trunk_repo_rebuild_only_official_releases( repo_fixture_name: str, run_psr_release: RunPSReleaseFn, build_trunk_only_repo_w_tags: BuildSpecificRepoFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn, example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, build_repo_from_definition: BuildRepoFromDefinitionFn, mocked_git_push: MagicMock, post_mocker: Mocker, version_py_file: Path, get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn, get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn, pyproject_toml_file: Path, changelog_md_file: Path, changelog_rst_file: Path, ): # build target repo into a temporary directory target_repo_dir = example_project_dir / repo_fixture_name commit_type: CommitConvention = ( repo_fixture_name.split("commits", 1)[0].split("_")[-2] # type: ignore[assignment] ) target_repo_definition = build_trunk_only_repo_w_tags( repo_name=repo_fixture_name, commit_type=commit_type, dest_dir=target_repo_dir, ) target_git_repo = git_repo_for_directory(target_repo_dir) # split repo actions by release actions organized_steps = split_repo_actions_by_release_tags(target_repo_definition) configuration_steps = organized_steps.pop(None) unreleased_steps = organized_steps.pop("Unreleased", None) if unreleased_steps: raise ValueError("Unreleased steps found. 
Not Supported yet!") release_tags_2_steps = cast("dict[Version, list[RepoActions]]", organized_steps) # Create the mirror repo directory mirror_repo_dir = init_mirror_repo_for_rebuild( mirror_repo_dir=(example_project_dir / "mirror"), configuration_steps=configuration_steps, # type: ignore[arg-type] files_to_remove=[ changelog_md_file, changelog_rst_file, ], ) mirror_git_repo = git_repo_for_directory(mirror_repo_dir) # rebuild repo from scratch stopping before each release tag for curr_version, steps in release_tags_2_steps.items(): curr_release_tag = curr_version.as_tag() # make sure mocks are clear mocked_git_push.reset_mock() post_mocker.reset_mock() # Extract expected result from target repo target_git_repo.git.checkout(curr_release_tag, detach=True) expected_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir ) expected_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=target_repo_dir ) expected_pyproject_toml_content = ( target_repo_dir / pyproject_toml_file ).read_text() expected_version_file_content = (target_repo_dir / version_py_file).read_text() expected_release_commit_text = target_git_repo.head.commit.message # In our repo env, start building the repo from the definition build_repo_from_definition( dest_dir=mirror_repo_dir, repo_construction_steps=steps[:-1], # stop before the release step ) release_action_step: RepoActionRelease = steps[-1] # type: ignore[assignment] # Act: run PSR on the repo instead of the RELEASE step with freeze_time( release_action_step["details"]["datetime"] ), temporary_working_directory(mirror_repo_dir): run_psr_release( next_version_str=release_action_step["details"]["version"], git_repo=mirror_git_repo, ) # take measurement after running the version command actual_release_commit_text = mirror_git_repo.head.commit.message actual_pyproject_toml_content = ( mirror_repo_dir / pyproject_toml_file ).read_text() actual_version_file_content = (mirror_repo_dir / 
version_py_file).read_text() actual_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir ) actual_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir ) # Evaluate (normal release actions should have occurred as expected) # ------------------------------------------------------------------ # Make sure version file is updated assert expected_pyproject_toml_content == actual_pyproject_toml_content assert expected_version_file_content == actual_version_file_content # Make sure changelog is updated assert expected_md_changelog_content == actual_md_changelog_content assert expected_rst_changelog_content == actual_rst_changelog_content # Make sure commit is created assert expected_release_commit_text == actual_release_commit_text # Make sure tag is created assert curr_release_tag in [tag.name for tag in mirror_git_repo.tags] assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred test_repo_trunk_dual_version_support.py000066400000000000000000000154361506116242600404350ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/trunk_based_devfrom __future__ import annotations from typing import TYPE_CHECKING, cast import pytest from freezegun import freeze_time from tests.const import ( DEFAULT_BRANCH_NAME, ) from tests.fixtures.repos.trunk_based_dev import ( repo_w_trunk_only_dual_version_spt_conventional_commits, repo_w_trunk_only_dual_version_spt_emoji_commits, repo_w_trunk_only_dual_version_spt_scipy_commits, ) from tests.util import temporary_working_directory if TYPE_CHECKING: from pathlib import Path from unittest.mock import MagicMock from requests_mock import Mocker from semantic_release.version.version import Version from tests.e2e.cmd_version.bump_version.conftest import ( InitMirrorRepo4RebuildFn, RunPSReleaseFn, ) from tests.e2e.conftest import GetSanitizedChangelogContentFn from 
tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildSpecificRepoFn, CommitConvention, GetGitRepo4DirFn, RepoActionRelease, RepoActions, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_fixture_name", [ pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive) for repo_fixture_name in [ repo_w_trunk_only_dual_version_spt_conventional_commits.__name__, repo_w_trunk_only_dual_version_spt_emoji_commits.__name__, repo_w_trunk_only_dual_version_spt_scipy_commits.__name__, ] ], ) def test_trunk_repo_rebuild_dual_version_spt_official_releases_only( repo_fixture_name: str, run_psr_release: RunPSReleaseFn, build_trunk_only_repo_w_dual_version_support: BuildSpecificRepoFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn, example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, build_repo_from_definition: BuildRepoFromDefinitionFn, mocked_git_push: MagicMock, post_mocker: Mocker, version_py_file: Path, get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn, get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn, pyproject_toml_file: Path, changelog_md_file: Path, changelog_rst_file: Path, ): # build target repo into a temporary directory target_repo_dir = example_project_dir / repo_fixture_name commit_type: CommitConvention = ( repo_fixture_name.split("commits", 1)[0].split("_")[-2] # type: ignore[assignment] ) target_repo_definition = build_trunk_only_repo_w_dual_version_support( repo_name=repo_fixture_name, commit_type=commit_type, dest_dir=target_repo_dir, ) target_git_repo = git_repo_for_directory(target_repo_dir) # split repo actions by release actions organized_steps = split_repo_actions_by_release_tags(target_repo_definition) configuration_steps = organized_steps.pop(None) unreleased_steps = organized_steps.pop("Unreleased", None) if unreleased_steps: raise 
ValueError("Unreleased steps found. Not Supported yet!") release_tags_2_steps = cast("dict[Version, list[RepoActions]]", organized_steps) # Create the mirror repo directory mirror_repo_dir = init_mirror_repo_for_rebuild( mirror_repo_dir=(example_project_dir / "mirror"), configuration_steps=configuration_steps, # type: ignore[arg-type] files_to_remove=[ changelog_md_file, changelog_rst_file, ], ) mirror_git_repo = git_repo_for_directory(mirror_repo_dir) # rebuild repo from scratch stopping before each release tag for curr_version, steps in release_tags_2_steps.items(): curr_release_tag = curr_version.as_tag() # make sure mocks are clear mocked_git_push.reset_mock() post_mocker.reset_mock() # Extract expected result from target repo head_reference_name = ( curr_release_tag if curr_release_tag != "Unreleased" else DEFAULT_BRANCH_NAME ) target_git_repo.git.checkout(head_reference_name, detach=True) expected_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir ) expected_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=target_repo_dir ) expected_pyproject_toml_content = ( target_repo_dir / pyproject_toml_file ).read_text() expected_version_file_content = (target_repo_dir / version_py_file).read_text() expected_release_commit_text = target_git_repo.head.commit.message # In our repo env, start building the repo from the definition build_repo_from_definition( dest_dir=mirror_repo_dir, repo_construction_steps=steps[:-1], # stop before the release step ) release_action_step: RepoActionRelease = steps[-1] # type: ignore[assignment] # Act: run PSR on the repo instead of the RELEASE step with freeze_time( release_action_step["details"]["datetime"] ), temporary_working_directory(mirror_repo_dir): run_psr_release( next_version_str=release_action_step["details"]["version"], git_repo=mirror_git_repo, ) # take measurement after running the version command actual_release_commit_text = mirror_git_repo.head.commit.message 
actual_pyproject_toml_content = ( mirror_repo_dir / pyproject_toml_file ).read_text() actual_version_file_content = (mirror_repo_dir / version_py_file).read_text() actual_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir ) actual_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir ) # Evaluate (normal release actions should have occurred as expected) # ------------------------------------------------------------------ # Make sure version file is updated assert expected_pyproject_toml_content == actual_pyproject_toml_content assert expected_version_file_content == actual_version_file_content # Make sure changelog is updated assert expected_md_changelog_content == actual_md_changelog_content assert expected_rst_changelog_content == actual_rst_changelog_content # Make sure commit is created assert expected_release_commit_text == actual_release_commit_text # Make sure tag is created assert curr_release_tag in [tag.name for tag in mirror_git_repo.tags] assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred test_repo_trunk_dual_version_support_w_prereleases.py000066400000000000000000000157251506116242600433560ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/trunk_based_devfrom __future__ import annotations from typing import TYPE_CHECKING, cast import pytest from freezegun import freeze_time from tests.const import ( DEFAULT_BRANCH_NAME, ) from tests.fixtures.repos.trunk_based_dev import ( repo_w_trunk_only_dual_version_spt_w_prereleases_conventional_commits, repo_w_trunk_only_dual_version_spt_w_prereleases_emoji_commits, repo_w_trunk_only_dual_version_spt_w_prereleases_scipy_commits, ) from tests.util import temporary_working_directory if TYPE_CHECKING: from pathlib import Path from unittest.mock import MagicMock from requests_mock import Mocker from semantic_release.version.version 
import Version from tests.e2e.cmd_version.bump_version.conftest import ( InitMirrorRepo4RebuildFn, RunPSReleaseFn, ) from tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildSpecificRepoFn, CommitConvention, GetGitRepo4DirFn, RepoActionRelease, RepoActions, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.xfail(reason="Not yet implemented, see issue #555 for details") @pytest.mark.parametrize( "repo_fixture_name", [ pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive) for repo_fixture_name in [ repo_w_trunk_only_dual_version_spt_w_prereleases_conventional_commits.__name__, repo_w_trunk_only_dual_version_spt_w_prereleases_emoji_commits.__name__, repo_w_trunk_only_dual_version_spt_w_prereleases_scipy_commits.__name__, ] ], ) def test_trunk_repo_rebuild_dual_version_spt_w_official_n_prereleases( repo_fixture_name: str, run_psr_release: RunPSReleaseFn, build_trunk_only_repo_w_dual_version_spt_w_prereleases: BuildSpecificRepoFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn, example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, build_repo_from_definition: BuildRepoFromDefinitionFn, mocked_git_push: MagicMock, post_mocker: Mocker, version_py_file: Path, get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn, get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn, pyproject_toml_file: Path, changelog_md_file: Path, changelog_rst_file: Path, ): # build target repo into a temporary directory target_repo_dir = example_project_dir / repo_fixture_name commit_type: CommitConvention = ( repo_fixture_name.split("commits", 1)[0].split("_")[-2] # type: ignore[assignment] ) target_repo_definition = build_trunk_only_repo_w_dual_version_spt_w_prereleases( repo_name=repo_fixture_name, commit_type=commit_type, 
dest_dir=target_repo_dir, ) target_git_repo = git_repo_for_directory(target_repo_dir) # split repo actions by release actions organized_steps = split_repo_actions_by_release_tags(target_repo_definition) configuration_steps = organized_steps.pop(None) unreleased_steps = organized_steps.pop("Unreleased", None) if unreleased_steps: raise ValueError("Unreleased steps found. Not Supported yet!") release_tags_2_steps = cast("dict[Version, list[RepoActions]]", organized_steps) # Create the mirror repo directory mirror_repo_dir = init_mirror_repo_for_rebuild( mirror_repo_dir=(example_project_dir / "mirror"), configuration_steps=configuration_steps, # type: ignore[arg-type] files_to_remove=[ changelog_md_file, changelog_rst_file, ], ) mirror_git_repo = git_repo_for_directory(mirror_repo_dir) # rebuild repo from scratch stopping before each release tag for curr_version, steps in release_tags_2_steps.items(): curr_release_tag = curr_version.as_tag() # make sure mocks are clear mocked_git_push.reset_mock() post_mocker.reset_mock() # Extract expected result from target repo head_reference_name = ( curr_release_tag if curr_release_tag != "Unreleased" else DEFAULT_BRANCH_NAME ) target_git_repo.git.checkout(head_reference_name, detach=True) expected_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir ) expected_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=target_repo_dir ) expected_pyproject_toml_content = ( target_repo_dir / pyproject_toml_file ).read_text() expected_version_file_content = (target_repo_dir / version_py_file).read_text() expected_release_commit_text = target_git_repo.head.commit.message # In our repo env, start building the repo from the definition build_repo_from_definition( dest_dir=mirror_repo_dir, repo_construction_steps=steps[:-1], # stop before the release step ) release_action_step: RepoActionRelease = steps[-1] # type: ignore[assignment] # Act: run PSR on the repo instead of the RELEASE step with 
freeze_time( release_action_step["details"]["datetime"] ), temporary_working_directory(mirror_repo_dir): run_psr_release( next_version_str=release_action_step["details"]["version"], git_repo=mirror_git_repo, ) # take measurement after running the version command actual_release_commit_text = mirror_git_repo.head.commit.message actual_pyproject_toml_content = ( mirror_repo_dir / pyproject_toml_file ).read_text() actual_version_file_content = (mirror_repo_dir / version_py_file).read_text() actual_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir ) actual_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir ) # Evaluate (normal release actions should have occurred as expected) # ------------------------------------------------------------------ # Make sure version file is updated assert expected_pyproject_toml_content == actual_pyproject_toml_content assert expected_version_file_content == actual_version_file_content # Make sure changelog is updated assert expected_md_changelog_content == actual_md_changelog_content assert expected_rst_changelog_content == actual_rst_changelog_content # Make sure commit is created assert expected_release_commit_text == actual_release_commit_text # Make sure tag is created assert curr_release_tag in [tag.name for tag in mirror_git_repo.tags] assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred test_repo_trunk_w_prereleases.py000066400000000000000000000150241506116242600370000ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/trunk_based_devfrom __future__ import annotations from typing import TYPE_CHECKING, cast import pytest from freezegun import freeze_time from tests.fixtures.repos.trunk_based_dev import ( repo_w_trunk_only_n_prereleases_conventional_commits, repo_w_trunk_only_n_prereleases_emoji_commits, 
repo_w_trunk_only_n_prereleases_scipy_commits, ) from tests.util import temporary_working_directory if TYPE_CHECKING: from pathlib import Path from unittest.mock import MagicMock from requests_mock import Mocker from semantic_release.version.version import Version from tests.e2e.cmd_version.bump_version.conftest import ( InitMirrorRepo4RebuildFn, RunPSReleaseFn, ) from tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildSpecificRepoFn, CommitConvention, GetGitRepo4DirFn, RepoActionRelease, RepoActions, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_fixture_name", [ pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive) for repo_fixture_name in [ repo_w_trunk_only_n_prereleases_conventional_commits.__name__, repo_w_trunk_only_n_prereleases_emoji_commits.__name__, repo_w_trunk_only_n_prereleases_scipy_commits.__name__, ] ], ) def test_trunk_repo_rebuild_w_prereleases( repo_fixture_name: str, run_psr_release: RunPSReleaseFn, build_trunk_only_repo_w_prerelease_tags: BuildSpecificRepoFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn, example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, build_repo_from_definition: BuildRepoFromDefinitionFn, mocked_git_push: MagicMock, post_mocker: Mocker, version_py_file: Path, get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn, get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn, pyproject_toml_file: Path, changelog_md_file: Path, changelog_rst_file: Path, ): # build target repo into a temporary directory target_repo_dir = example_project_dir / repo_fixture_name commit_type: CommitConvention = ( repo_fixture_name.split("commits", 1)[0].split("_")[-2] # type: ignore[assignment] ) target_repo_definition = build_trunk_only_repo_w_prerelease_tags( 
repo_name=repo_fixture_name, commit_type=commit_type, dest_dir=target_repo_dir, ) target_git_repo = git_repo_for_directory(target_repo_dir) # split repo actions by release actions organized_steps = split_repo_actions_by_release_tags(target_repo_definition) configuration_steps = organized_steps.pop(None) unreleased_steps = organized_steps.pop("Unreleased", None) if unreleased_steps: raise ValueError("Unreleased steps found. Not Supported yet!") release_tags_2_steps = cast("dict[Version, list[RepoActions]]", organized_steps) # Create the mirror repo directory mirror_repo_dir = init_mirror_repo_for_rebuild( mirror_repo_dir=(example_project_dir / "mirror"), configuration_steps=configuration_steps, # type: ignore[arg-type] files_to_remove=[ changelog_md_file, changelog_rst_file, ], ) mirror_git_repo = git_repo_for_directory(mirror_repo_dir) # rebuild repo from scratch stopping before each release tag for curr_version, steps in release_tags_2_steps.items(): curr_release_tag = curr_version.as_tag() # make sure mocks are clear mocked_git_push.reset_mock() post_mocker.reset_mock() # Extract expected result from target repo target_git_repo.git.checkout(curr_release_tag, detach=True) expected_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir ) expected_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=target_repo_dir ) expected_pyproject_toml_content = ( target_repo_dir / pyproject_toml_file ).read_text() expected_version_file_content = (target_repo_dir / version_py_file).read_text() expected_release_commit_text = target_git_repo.head.commit.message # In our repo env, start building the repo from the definition build_repo_from_definition( dest_dir=mirror_repo_dir, repo_construction_steps=steps[:-1], # stop before the release step ) release_action_step: RepoActionRelease = steps[-1] # type: ignore[assignment] # Act: run PSR on the repo instead of the RELEASE step with freeze_time( release_action_step["details"]["datetime"] 
), temporary_working_directory(mirror_repo_dir): run_psr_release( next_version_str=release_action_step["details"]["version"], git_repo=mirror_git_repo, ) # take measurement after running the version command actual_release_commit_text = mirror_git_repo.head.commit.message actual_pyproject_toml_content = ( mirror_repo_dir / pyproject_toml_file ).read_text() actual_version_file_content = (mirror_repo_dir / version_py_file).read_text() actual_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir ) actual_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir ) # Evaluate (normal release actions should have occurred as expected) # ------------------------------------------------------------------ # Make sure version file is updated assert expected_pyproject_toml_content == actual_pyproject_toml_content assert expected_version_file_content == actual_version_file_content # Make sure changelog is updated assert expected_md_changelog_content == actual_md_changelog_content assert expected_rst_changelog_content == actual_rst_changelog_content # Make sure commit is created assert expected_release_commit_text == actual_release_commit_text # Make sure tag is created assert curr_release_tag in [tag.name for tag in mirror_git_repo.tags] assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/trunk_based_dev_monorepo/000077500000000000000000000000001506116242600322525ustar00rootroot00000000000000__init__.py000066400000000000000000000000001506116242600342720ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/trunk_based_dev_monorepotest_monorepo_trunk.py000066400000000000000000000236071506116242600366750ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/e2e/cmd_version/bump_version/trunk_based_dev_monorepofrom __future__ 
import annotations from pathlib import Path from typing import TYPE_CHECKING, cast import pytest from freezegun import freeze_time from semantic_release.version.version import Version from tests.const import RepoActionStep from tests.fixtures.monorepos.trunk_based_dev import ( monorepo_w_trunk_only_releases_conventional_commits, ) from tests.util import temporary_working_directory if TYPE_CHECKING: from typing import Literal, Sequence from unittest.mock import MagicMock from requests_mock import Mocker from tests.e2e.cmd_version.bump_version.conftest import ( InitMirrorRepo4RebuildFn, RunPSReleaseFn, ) from tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildSpecificRepoFn, CommitConvention, GetGitRepo4DirFn, RepoActionConfigure, RepoActionConfigureMonorepo, RepoActionCreateMonorepo, RepoActionRelease, RepoActions, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_fixture_name", [ pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive) for repo_fixture_name in [ monorepo_w_trunk_only_releases_conventional_commits.__name__, ] ], ) def test_trunk_monorepo_rebuild_1_channel( repo_fixture_name: str, run_psr_release: RunPSReleaseFn, build_trunk_only_monorepo_w_tags: BuildSpecificRepoFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn, example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, build_repo_from_definition: BuildRepoFromDefinitionFn, mocked_git_push: MagicMock, post_mocker: Mocker, get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn, get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn, monorepo_pkg1_pyproject_toml_file: Path, monorepo_pkg2_pyproject_toml_file: Path, monorepo_pkg1_version_py_file: Path, monorepo_pkg2_version_py_file: Path, monorepo_pkg1_changelog_md_file: Path, 
monorepo_pkg2_changelog_md_file: Path, monorepo_pkg1_changelog_rst_file: Path, monorepo_pkg2_changelog_rst_file: Path, ): # build target repo into a temporary directory target_repo_dir = example_project_dir / repo_fixture_name commit_type = cast( "CommitConvention", repo_fixture_name.split("commits", 1)[0].split("_")[-2] ) target_repo_definition = build_trunk_only_monorepo_w_tags( repo_name=repo_fixture_name, commit_type=commit_type, dest_dir=target_repo_dir, ) target_git_repo = git_repo_for_directory(target_repo_dir) # split repo actions by release actions release_tags_2_steps = split_repo_actions_by_release_tags(target_repo_definition) configuration_steps = cast( "Sequence[RepoActionConfigure | RepoActionCreateMonorepo | RepoActionConfigureMonorepo]", release_tags_2_steps.pop(None), ) release_versions_2_steps = cast( "dict[Version | Literal['Unreleased'], list[RepoActions]]", release_tags_2_steps, ) # Create the mirror repo directory mirror_repo_dir = init_mirror_repo_for_rebuild( mirror_repo_dir=(example_project_dir / "mirror"), configuration_steps=configuration_steps, files_to_remove=[], ) mirror_git_repo = git_repo_for_directory(mirror_repo_dir) # rebuild repo from scratch stopping before each release tag for curr_release_key, steps in release_versions_2_steps.items(): curr_release_str = ( curr_release_key.as_tag() if isinstance(curr_release_key, Version) else curr_release_key ) # make sure mocks are clear mocked_git_push.reset_mock() post_mocker.reset_mock() # Extract expected result from target repo if curr_release_str != "Unreleased": target_git_repo.git.checkout(curr_release_str, detach=True, force=True) expected_pkg1_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir, changelog_file=monorepo_pkg1_changelog_md_file ) expected_pkg2_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir, changelog_file=monorepo_pkg2_changelog_md_file ) expected_pkg1_rst_changelog_content = 
get_sanitized_rst_changelog_content( repo_dir=target_repo_dir, changelog_file=monorepo_pkg1_changelog_rst_file ) expected_pkg2_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=target_repo_dir, changelog_file=monorepo_pkg2_changelog_rst_file ) expected_pkg1_pyproject_toml_content = ( target_repo_dir / monorepo_pkg1_pyproject_toml_file ).read_text() expected_pkg2_pyproject_toml_content = ( target_repo_dir / monorepo_pkg2_pyproject_toml_file ).read_text() expected_pkg1_version_file_content = ( target_repo_dir / monorepo_pkg1_version_py_file ).read_text() expected_pkg2_version_file_content = ( target_repo_dir / monorepo_pkg2_version_py_file ).read_text() expected_release_commit_text = target_git_repo.head.commit.message # In our repo env, start building the repo from the definition build_repo_from_definition( dest_dir=mirror_repo_dir, # stop before the release step repo_construction_steps=steps[ : -1 if curr_release_str != "Unreleased" else None ], ) release_directory = mirror_repo_dir for step in steps[::-1]: # reverse order if step["action"] == RepoActionStep.CHANGE_DIRECTORY: release_directory = ( mirror_repo_dir if str(Path(step["details"]["directory"])) == str(mirror_repo_dir.root) else Path(step["details"]["directory"]) ) release_directory = ( mirror_repo_dir / release_directory if not release_directory.is_absolute() else release_directory ) if mirror_repo_dir not in release_directory.parents: release_directory = mirror_repo_dir break # Act: run PSR on the repo instead of the RELEASE step if curr_release_str != "Unreleased": release_action_step = cast("RepoActionRelease", steps[-1]) with freeze_time( release_action_step["details"]["datetime"] ), temporary_working_directory(release_directory): run_psr_release( next_version_str=release_action_step["details"]["version"], git_repo=mirror_git_repo, config_toml_path=Path("pyproject.toml"), ) else: # run psr changelog command to validate changelog pass # take measurement after running the version 
command actual_release_commit_text = mirror_git_repo.head.commit.message actual_pkg1_pyproject_toml_content = ( mirror_repo_dir / monorepo_pkg1_pyproject_toml_file ).read_text() actual_pkg2_pyproject_toml_content = ( mirror_repo_dir / monorepo_pkg2_pyproject_toml_file ).read_text() actual_pkg1_version_file_content = ( mirror_repo_dir / monorepo_pkg1_version_py_file ).read_text() actual_pkg2_version_file_content = ( mirror_repo_dir / monorepo_pkg2_version_py_file ).read_text() actual_pkg1_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir, changelog_file=monorepo_pkg1_changelog_md_file ) actual_pkg2_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir, changelog_file=monorepo_pkg2_changelog_md_file ) actual_pkg1_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir, changelog_file=monorepo_pkg1_changelog_rst_file ) actual_pkg2_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir, changelog_file=monorepo_pkg2_changelog_rst_file ) # Evaluate (normal release actions should have occurred as expected) # ------------------------------------------------------------------ # Make sure version file is updated assert ( expected_pkg1_pyproject_toml_content == actual_pkg1_pyproject_toml_content ) assert ( expected_pkg2_pyproject_toml_content == actual_pkg2_pyproject_toml_content ) assert expected_pkg1_version_file_content == actual_pkg1_version_file_content assert expected_pkg2_version_file_content == actual_pkg2_version_file_content # Make sure changelog is updated assert expected_pkg1_md_changelog_content == actual_pkg1_md_changelog_content assert expected_pkg2_md_changelog_content == actual_pkg2_md_changelog_content assert expected_pkg1_rst_changelog_content == actual_pkg1_rst_changelog_content assert expected_pkg2_rst_changelog_content == actual_pkg2_rst_changelog_content # Make sure commit is created assert expected_release_commit_text == 
actual_release_commit_text if curr_release_str != "Unreleased": # Make sure tag is created assert curr_release_str in [tag.name for tag in mirror_git_repo.tags] # Make sure publishing actions occurred assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred python-semantic-release-10.4.1/tests/e2e/cmd_version/test_version.py000066400000000000000000000226561506116242600255760ustar00rootroot00000000000000from __future__ import annotations from pathlib import Path from textwrap import dedent from typing import TYPE_CHECKING import pytest from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.hvcs.github import Github from tests.const import ( MAIN_PROG_NAME, VERSION_SUBCMD, ) from tests.fixtures.repos import ( repo_w_no_tags_conventional_commits, repo_w_trunk_only_conventional_commits, ) from tests.util import assert_successful_exit_code if TYPE_CHECKING: from unittest.mock import MagicMock from git import Repo from requests_mock import Mocker from tests.conftest import RunCliFn from tests.e2e.conftest import StripLoggingMessagesFn from tests.fixtures.example_project import GetWheelFileFn, UpdatePyprojectTomlFn from tests.fixtures.git_repo import BuiltRepoResult, GetVersionsFromRepoBuildDefFn # No-op shouldn't change based on the branching/merging of the repository @pytest.mark.parametrize( "repo_result, next_release_version", # must use a repo that is ready for a release to prevent no release # logic from being triggered before the noop logic [(lazy_fixture(repo_w_no_tags_conventional_commits.__name__), "1.0.0")], ) def test_version_noop_is_noop( repo_result: BuiltRepoResult, next_release_version: str, run_cli: RunCliFn, mocked_git_push: MagicMock, post_mocker: Mocker, get_wheel_file: GetWheelFileFn, ): repo: Repo = repo_result["repo"] build_result_file = get_wheel_file(next_release_version) # Setup: reset any uncommitted changes (if any) 
repo.git.reset("--hard") # Setup: take measurement before running the version command head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, "--noop", VERSION_SUBCMD] result = run_cli(cli_cmd[1:]) # take measurement after running the version command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate (no release actions should have occurred when no bump) assert_successful_exit_code(result, cli_cmd) assert f"{next_release_version}\n" == result.stdout # No commit has been made assert head_sha_before == head_after.hexsha, "HEAD should not have changed" assert not tags_set_difference # No tag created # no build result assert not build_result_file.exists() # no file changes (since no commit was made then just check for non-committed changes) assert not repo.git.status(short=True) assert mocked_git_push.call_count == 0 # no git push of tag or commit assert post_mocker.call_count == 0 # no vcs release @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_trunk_only_conventional_commits.__name__)], ) def test_version_no_git_verify( repo_result: BuiltRepoResult, run_cli: RunCliFn, update_pyproject_toml: UpdatePyprojectTomlFn, mocked_git_push: MagicMock, post_mocker: Mocker, ): repo = repo_result["repo"] # setup: set configuration setting update_pyproject_toml("tool.semantic_release.no_git_verify", True) repo.git.commit( m="chore: adjust project configuration for --no-verify release commits", a=True ) # setup: create executable pre-commit script precommit_hook = Path(repo.git_dir, "hooks", "pre-commit") precommit_hook.parent.mkdir(parents=True, exist_ok=True) precommit_hook.write_text( dedent( """\ #!/bin/sh echo >&2 "Always fail pre-commit" && exit 1; """ ) ) precommit_hook.chmod(0o754) # setup: set git configuration to have the pre-commit hook repo.git.config( "core.hookspath", 
str(precommit_hook.parent.relative_to(repo.working_dir)), local=True, ) # Take measurement beforehand head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Execute cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--patch"] result = run_cli(cli_cmd[1:]) # Take measurement after the command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate (normal release actions should have occurred when forced patch bump) assert_successful_exit_code(result, cli_cmd) # A commit has been made (regardless of precommit) assert [head_sha_before] == [head.hexsha for head in head_after.parents] assert len(tags_set_difference) == 1 # A tag has been created assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_trunk_only_conventional_commits.__name__)] ) def test_version_on_nonrelease_branch( repo_result: BuiltRepoResult, run_cli: RunCliFn, mocked_git_push: MagicMock, post_mocker: Mocker, strip_logging_messages: StripLoggingMessagesFn, ): """ Given repo is on a non-release branch, When running the version command, Then no version release should happen which means no code changes, no build, no commit, no tag, no push, and no vcs release creation while returning a successful exit code """ repo = repo_result["repo"] branch = repo.create_head("next").checkout() expected_error_msg = ( f"branch '{branch.name}' isn't in any release groups; no release will be made\n" ) repo_status_before = repo.git.status(short=True) head_before = repo.head.commit.hexsha tags_before = sorted([tag.name for tag in repo.tags]) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD] result = run_cli(cli_cmd[1:]) # Evaluate (expected -> actual) assert_successful_exit_code(result, cli_cmd) assert not result.stdout assert expected_error_msg == 
strip_logging_messages(result.stderr) # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) tags_after = sorted([tag.name for tag in repo.tags]) assert repo_status_before == repo.git.status(short=True) assert head_before == repo.head.commit.hexsha assert tags_before == tags_after assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_trunk_only_conventional_commits.__name__)], ) def test_version_on_last_release( repo_result: BuiltRepoResult, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, run_cli: RunCliFn, mocked_git_push: MagicMock, post_mocker: Mocker, strip_logging_messages: StripLoggingMessagesFn, ): """ Given repo is on the last release version, When running the version command, Then no version release should happen which means no code changes, no build, no commit, no tag, no push, and no vcs release creation while returning a successful exit code and printing the last release version """ repo = repo_result["repo"] latest_release_version = get_versions_from_repo_build_def( repo_result["definition"] )[-1] expected_error_msg = ( f"No release will be made, {latest_release_version} has already been released!" 
) # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_before = repo.head.commit.hexsha tags_before = sorted([tag.name for tag in repo.tags]) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD] result = run_cli(cli_cmd[1:], env={Github.DEFAULT_ENV_TOKEN_NAME: "1234"}) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit.hexsha tags_after = sorted([tag.name for tag in repo.tags]) # Evaluate (expected -> actual) assert_successful_exit_code(result, cli_cmd) assert f"{latest_release_version}\n" == result.stdout assert f"{expected_error_msg}\n" == strip_logging_messages(result.stderr) # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_before == head_after assert tags_before == tags_after assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_no_tags_conventional_commits.__name__)] ) def test_version_only_tag_push( repo_result: BuiltRepoResult, run_cli: RunCliFn, mocked_git_push: MagicMock, post_mocker: Mocker, ) -> None: """ Given a repo with no tags, When running the version command with the `--no-commit` and `--tag` flags, Then a tag should be created on the current commit, pushed, and a release created. 
""" repo = repo_result["repo"] # Setup head_before = repo.head.commit # Act cli_cmd = [ MAIN_PROG_NAME, VERSION_SUBCMD, "--no-commit", "--tag", ] result = run_cli(cli_cmd[1:]) # capture values after the command tag_after = repo.tags[-1].name head_after = repo.head.commit # Assert only tag was created, it was pushed and then release was created assert_successful_exit_code(result, cli_cmd) assert tag_after == "v1.0.0" assert head_before == head_after assert mocked_git_push.call_count == 1 # 0 for commit, 1 for tag assert post_mocker.call_count == 1 python-semantic-release-10.4.1/tests/e2e/cmd_version/test_version_build.py000066400000000000000000000316071506116242600267510ustar00rootroot00000000000000from __future__ import annotations import os import subprocess import sys from pathlib import Path from typing import TYPE_CHECKING from unittest import mock import pytest import shellingham import tomlkit from flatdict import FlatDict from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from tests.const import MAIN_PROG_NAME, VERSION_SUBCMD from tests.fixtures.repos import repo_w_trunk_only_conventional_commits from tests.util import assert_successful_exit_code, get_func_qual_name if TYPE_CHECKING: from tests.conftest import RunCliFn from tests.fixtures.example_project import GetWheelFileFn, UpdatePyprojectTomlFn from tests.fixtures.git_repo import BuiltRepoResult @pytest.mark.skipif(sys.platform == "win32", reason="Unix only") @pytest.mark.parametrize( "shell", filter( None, [ # because we will actually run the build command in this shell, we must ensure it exists "bash" if list( filter( lambda sh_exe: Path(sh_exe).exists(), ("/bin/bash", "/usr/bin/bash", "/usr/local/bin/bash"), ) ) else "", "zsh" if list( filter( lambda sh_exe: Path(sh_exe).exists(), ("/bin/zsh", "/usr/bin/zsh", "/usr/local/bin/zsh"), ) ) else "", ], ) or ["sh"], ) @pytest.mark.parametrize( "repo_result, cli_args, next_release_version", [ ( 
lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), ["--patch"], "0.1.2", ) ], ) def test_version_runs_build_command( repo_result: BuiltRepoResult, cli_args: list[str], next_release_version: str, run_cli: RunCliFn, shell: str, get_wheel_file: GetWheelFileFn, example_pyproject_toml: Path, mocked_git_push: mock.MagicMock, post_mocker: mock.Mock, ): # Setup built_wheel_file = get_wheel_file(next_release_version) pyproject_config = FlatDict( tomlkit.loads(example_pyproject_toml.read_text(encoding="utf-8")), delimiter=".", ) build_command = pyproject_config.get("tool.semantic_release.build_command", "") patched_os_environment = { "CI": "true", "PATH": os.getenv("PATH", ""), "HOME": "/home/username", "VIRTUAL_ENV": "./.venv", # Simulate that all CI's are set "GITHUB_ACTIONS": "true", "GITLAB_CI": "true", "GITEA_ACTIONS": "true", "BITBUCKET_REPO_FULL_NAME": "python-semantic-release/python-semantic-release.git", "PSR_DOCKER_GITHUB_ACTION": "true", } # Wrap subprocess.run to capture the arguments to the call with mock.patch( get_func_qual_name(subprocess.run), wraps=subprocess.run, ) as patched_subprocess_run, mock.patch( get_func_qual_name(shellingham.detect_shell), return_value=(shell, shell) ): # ACT: run & force a new version that will trigger the build command cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *cli_args] result = run_cli(cli_cmd[1:], env=patched_os_environment) # Evaluate assert_successful_exit_code(result, cli_cmd) patched_subprocess_run.assert_called_with( [shell, "-c", build_command], check=True, env={ "NEW_VERSION": next_release_version, # injected into environment "PACKAGE_NAME": "", # PSR injected environment variable "CI": patched_os_environment["CI"], "BITBUCKET_CI": "true", # Converted "GITHUB_ACTIONS": patched_os_environment["GITHUB_ACTIONS"], "GITEA_ACTIONS": patched_os_environment["GITEA_ACTIONS"], "GITLAB_CI": patched_os_environment["GITLAB_CI"], "HOME": patched_os_environment["HOME"], "PATH": patched_os_environment["PATH"], 
"VIRTUAL_ENV": patched_os_environment["VIRTUAL_ENV"], "PSR_DOCKER_GITHUB_ACTION": patched_os_environment[ "PSR_DOCKER_GITHUB_ACTION" ], }, ) assert built_wheel_file.exists() assert mocked_git_push.call_count == 2 assert post_mocker.call_count == 1 @pytest.mark.skipif(sys.platform != "win32", reason="Windows only") @pytest.mark.parametrize("shell", ("powershell", "pwsh", "cmd")) @pytest.mark.parametrize( "repo_result, cli_args, next_release_version", [ ( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), ["--patch"], "0.1.2", ) ], ) def test_version_runs_build_command_windows( repo_result: BuiltRepoResult, cli_args: list[str], next_release_version: str, run_cli: RunCliFn, shell: str, get_wheel_file: GetWheelFileFn, example_pyproject_toml: Path, update_pyproject_toml: UpdatePyprojectTomlFn, mocked_git_push: mock.MagicMock, post_mocker: mock.Mock, clean_os_environment: dict[str, str], ): if shell == "cmd": build_result_file = get_wheel_file("%NEW_VERSION%") update_pyproject_toml( "tool.semantic_release.build_command", str.join( " && ", [ f"mkdir {build_result_file.parent}", f"type nul > {build_result_file}", f"echo 'Built distribution: {build_result_file}'", ], ), ) # Setup package_name = "my-package" update_pyproject_toml("project.name", package_name) built_wheel_file = get_wheel_file(next_release_version) pyproject_config = FlatDict( tomlkit.loads(example_pyproject_toml.read_text(encoding="utf-8")), delimiter=".", ) build_command = pyproject_config.get("tool.semantic_release.build_command", "") patched_os_environment = { **clean_os_environment, "CI": "true", "VIRTUAL_ENV": "./.venv", # Simulate that all CI's are set "GITHUB_ACTIONS": "true", "GITLAB_CI": "true", "GITEA_ACTIONS": "true", "BITBUCKET_REPO_FULL_NAME": "python-semantic-release/python-semantic-release.git", "PSR_DOCKER_GITHUB_ACTION": "true", } # Wrap subprocess.run to capture the arguments to the call with mock.patch( get_func_qual_name(subprocess.run), wraps=subprocess.run, ) as 
patched_subprocess_run, mock.patch( get_func_qual_name(shellingham.detect_shell), return_value=(shell, shell) ): # ACT: run & force a new version that will trigger the build command cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *cli_args] result = run_cli(cli_cmd[1:], env=patched_os_environment) # Evaluate assert_successful_exit_code(result, cli_cmd) patched_subprocess_run.assert_called_once_with( [shell, "/c" if shell == "cmd" else "-Command", build_command], check=True, env={ **clean_os_environment, "NEW_VERSION": next_release_version, # injected into environment "PACKAGE_NAME": package_name, # PSR injected environment variable "CI": patched_os_environment["CI"], "BITBUCKET_CI": "true", # Converted "GITHUB_ACTIONS": patched_os_environment["GITHUB_ACTIONS"], "GITEA_ACTIONS": patched_os_environment["GITEA_ACTIONS"], "GITLAB_CI": patched_os_environment["GITLAB_CI"], "VIRTUAL_ENV": patched_os_environment["VIRTUAL_ENV"], "PSR_DOCKER_GITHUB_ACTION": patched_os_environment[ "PSR_DOCKER_GITHUB_ACTION" ], }, ) dist_file_exists = built_wheel_file.exists() assert dist_file_exists, f"\n Expected wheel file to be created at {built_wheel_file}, but it does not exist." 
assert mocked_git_push.call_count == 2 assert post_mocker.call_count == 1 @pytest.mark.parametrize( "repo_result, cli_args, next_release_version", [ ( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), ["--patch"], "0.1.2", ) ], ) def test_version_runs_build_command_w_user_env( repo_result: BuiltRepoResult, cli_args: list[str], next_release_version: str, run_cli: RunCliFn, example_pyproject_toml: Path, update_pyproject_toml: UpdatePyprojectTomlFn, clean_os_environment: dict[str, str], ): # Setup patched_os_environment = { **clean_os_environment, "CI": "true", "VIRTUAL_ENV": "./.venv", # Simulate that all CI's are set "GITHUB_ACTIONS": "true", "GITLAB_CI": "true", "GITEA_ACTIONS": "true", "BITBUCKET_REPO_FULL_NAME": "python-semantic-release/python-semantic-release.git", "PSR_DOCKER_GITHUB_ACTION": "true", # User environment variables (varying passthrough results) "MY_CUSTOM_VARIABLE": "custom", "IGNORED_VARIABLE": "ignore_me", "OVERWRITTEN_VAR": "initial", "SET_AS_EMPTY_VAR": "not_empty", } pyproject_config = FlatDict( tomlkit.loads(example_pyproject_toml.read_text(encoding="utf-8")), delimiter=".", ) build_command = pyproject_config.get("tool.semantic_release.build_command", "") update_pyproject_toml( "tool.semantic_release.build_command_env", [ # Includes arbitrary whitespace which will be removed " MY_CUSTOM_VARIABLE ", # detect and pass from environment " OVERWRITTEN_VAR = overrided", # pass hardcoded value which overrides environment " SET_AS_EMPTY_VAR = ", # keep variable initialized but as empty string " HARDCODED_VAR=hardcoded ", # pass hardcoded value that doesn't override anything "VAR_W_EQUALS = a-var===condition", # only splits on 1st equals sign "=ignored-invalid-named-var", # TODO: validation error instead, but currently just ignore ], ) package_name = "my-package" update_pyproject_toml("project.name", package_name) # Mock out subprocess.run with mock.patch( get_func_qual_name(subprocess.run), 
return_value=subprocess.CompletedProcess(args=(), returncode=0), ) as patched_subprocess_run, mock.patch( get_func_qual_name(shellingham.detect_shell), return_value=("bash", "/usr/bin/bash"), ): cli_cmd = [ MAIN_PROG_NAME, VERSION_SUBCMD, *cli_args, "--no-commit", "--no-tag", "--no-changelog", "--no-push", ] # ACT: run & force a new version that will trigger the build command result = run_cli(cli_cmd[1:], env=patched_os_environment) # Evaluate # [1] Make sure it did not error internally assert_successful_exit_code(result, cli_cmd) # [2] Make sure the subprocess was called with the correct environment patched_subprocess_run.assert_called_once_with( ["bash", "-c", build_command], check=True, env={ **clean_os_environment, "NEW_VERSION": next_release_version, # injected into environment "PACKAGE_NAME": package_name, # PSR injected environment variable "CI": patched_os_environment["CI"], "BITBUCKET_CI": "true", # Converted "GITHUB_ACTIONS": patched_os_environment["GITHUB_ACTIONS"], "GITEA_ACTIONS": patched_os_environment["GITEA_ACTIONS"], "GITLAB_CI": patched_os_environment["GITLAB_CI"], "VIRTUAL_ENV": patched_os_environment["VIRTUAL_ENV"], "PSR_DOCKER_GITHUB_ACTION": patched_os_environment[ "PSR_DOCKER_GITHUB_ACTION" ], "MY_CUSTOM_VARIABLE": patched_os_environment["MY_CUSTOM_VARIABLE"], "OVERWRITTEN_VAR": "overrided", "SET_AS_EMPTY_VAR": "", "HARDCODED_VAR": "hardcoded", # Note that IGNORED_VARIABLE is not here. 
"VAR_W_EQUALS": "a-var===condition", }, ) @pytest.mark.usefixtures(repo_w_trunk_only_conventional_commits.__name__) def test_version_skips_build_command_with_skip_build( run_cli: RunCliFn, mocked_git_push: mock.MagicMock, post_mocker: mock.Mock, ): cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--patch", "--skip-build"] with mock.patch( get_func_qual_name(subprocess.run), return_value=subprocess.CompletedProcess(args=(), returncode=0), ) as patched_subprocess_run: # Act: force a new version result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) patched_subprocess_run.assert_not_called() assert mocked_git_push.call_count == 2 assert post_mocker.call_count == 1 python-semantic-release-10.4.1/tests/e2e/cmd_version/test_version_bump.py000066400000000000000000004114441506116242600266160ustar00rootroot00000000000000from __future__ import annotations from datetime import timedelta from itertools import count from pathlib import Path from typing import TYPE_CHECKING, cast import pytest import tomlkit # Limitation in pytest-lazy-fixture - see https://stackoverflow.com/a/69884019 from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.commit_parser.conventional import ConventionalCommitParser from semantic_release.commit_parser.emoji import EmojiCommitParser from semantic_release.commit_parser.scipy import ScipyCommitParser from tests.const import EXAMPLE_PROJECT_NAME, MAIN_PROG_NAME, VERSION_SUBCMD from tests.fixtures import ( conventional_chore_commits, conventional_major_commits, conventional_minor_commits, conventional_patch_commits, emoji_chore_commits, emoji_major_commits, emoji_minor_commits, emoji_patch_commits, repo_w_git_flow_w_alpha_prereleases_n_conventional_commits, repo_w_git_flow_w_alpha_prereleases_n_emoji_commits, repo_w_git_flow_w_alpha_prereleases_n_scipy_commits, repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits, repo_w_git_flow_w_rc_n_alpha_prereleases_n_emoji_commits, 
repo_w_git_flow_w_rc_n_alpha_prereleases_n_scipy_commits, repo_w_github_flow_w_feature_release_channel_conventional_commits, repo_w_initial_commit, repo_w_no_tags_conventional_commits, repo_w_no_tags_conventional_commits_w_zero_version, repo_w_no_tags_emoji_commits, repo_w_no_tags_scipy_commits, repo_w_trunk_only_conventional_commits, repo_w_trunk_only_emoji_commits, repo_w_trunk_only_n_prereleases_conventional_commits, repo_w_trunk_only_n_prereleases_emoji_commits, repo_w_trunk_only_n_prereleases_scipy_commits, repo_w_trunk_only_scipy_commits, scipy_chore_commits, scipy_major_commits, scipy_minor_commits, scipy_patch_commits, ) from tests.util import ( add_text_to_file, assert_successful_exit_code, dynamic_python_import, xdist_sort_hack, ) if TYPE_CHECKING: from unittest.mock import MagicMock from requests_mock import Mocker from tests.conftest import GetStableDateNowFn, RunCliFn from tests.fixtures.example_project import ( ExProjectDir, GetExpectedVersionPyFileContentFn, UpdatePyprojectTomlFn, ) from tests.fixtures.git_repo import BuiltRepoResult @pytest.mark.parametrize( "repo_result, cli_args, next_release_version", [ *( ( lazy_fixture( repo_w_no_tags_conventional_commits_w_zero_version.__name__ ), cli_args, next_release_version, ) for cli_args, next_release_version in ( # New build-metadata forces a new release (["--build-metadata", "build.12345"], "0.1.0+build.12345"), # Forced version bump (["--prerelease"], "0.0.0-rc.1"), (["--patch"], "0.0.1"), (["--minor"], "0.1.0"), (["--major"], "1.0.0"), # Forced version bump with --build-metadata (["--patch", "--build-metadata", "build.12345"], "0.0.1+build.12345"), # Forced version bump with --as-prerelease (["--prerelease", "--as-prerelease"], "0.0.0-rc.1"), (["--patch", "--as-prerelease"], "0.0.1-rc.1"), (["--minor", "--as-prerelease"], "0.1.0-rc.1"), (["--major", "--as-prerelease"], "1.0.0-rc.1"), # Forced version bump with --as-prerelease and modified --prerelease-token ( ["--patch", "--as-prerelease", 
"--prerelease-token", "beta"], "0.0.1-beta.1", ), # Forced version bump with --as-prerelease and modified --prerelease-token # and --build-metadata ( [ "--patch", "--as-prerelease", "--prerelease-token", "beta", "--build-metadata", "build.12345", ], "0.0.1-beta.1+build.12345", ), ) ), *[ pytest.param( lazy_fixture(repo_fixture_name), cli_args, expected_stdout, marks=pytest.mark.comprehensive, ) for repo_fixture_name, values in { repo_w_trunk_only_conventional_commits.__name__: [ # New build-metadata forces a new release (["--build-metadata", "build.12345"], "0.1.1+build.12345"), # Forced version bump (["--prerelease"], "0.1.1-rc.1"), (["--patch"], "0.1.2"), (["--minor"], "0.2.0"), (["--major"], "1.0.0"), # Forced version bump with --build-metadata ( ["--patch", "--build-metadata", "build.12345"], "0.1.2+build.12345", ), # Forced version bump with --as-prerelease (["--prerelease", "--as-prerelease"], "0.1.1-rc.1"), (["--patch", "--as-prerelease"], "0.1.2-rc.1"), (["--minor", "--as-prerelease"], "0.2.0-rc.1"), (["--major", "--as-prerelease"], "1.0.0-rc.1"), # Forced version bump with --as-prerelease and modified --prerelease-token ( ["--patch", "--as-prerelease", "--prerelease-token", "beta"], "0.1.2-beta.1", ), # Forced version bump with --as-prerelease and modified --prerelease-token # and --build-metadata ( [ "--patch", "--as-prerelease", "--prerelease-token", "beta", "--build-metadata", "build.12345", ], "0.1.2-beta.1+build.12345", ), ], repo_w_trunk_only_n_prereleases_conventional_commits.__name__: [ # New build-metadata forces a new release (["--build-metadata", "build.12345"], "0.2.0+build.12345"), # Forced version bump # NOTE: There is already a 0.2.0-rc.1 (["--prerelease"], "0.2.0-rc.2"), (["--patch"], "0.2.1"), (["--minor"], "0.3.0"), (["--major"], "1.0.0"), # Forced version bump with --build-metadata ( ["--patch", "--build-metadata", "build.12345"], "0.2.1+build.12345", ), # Forced version bump with --as-prerelease (["--prerelease", "--as-prerelease"], 
"0.2.0-rc.2"), (["--patch", "--as-prerelease"], "0.2.1-rc.1"), (["--minor", "--as-prerelease"], "0.3.0-rc.1"), (["--major", "--as-prerelease"], "1.0.0-rc.1"), # Forced version bump with --as-prerelease and modified --prerelease-token ( ["--patch", "--as-prerelease", "--prerelease-token", "beta"], "0.2.1-beta.1", ), # Forced version bump with --as-prerelease and modified --prerelease-token # and --build-metadata ( [ "--patch", "--as-prerelease", "--prerelease-token", "beta", "--build-metadata", "build.12345", ], "0.2.1-beta.1+build.12345", ), ], repo_w_github_flow_w_feature_release_channel_conventional_commits.__name__: [ # New build-metadata forces a new release (["--build-metadata", "build.12345"], "1.1.0+build.12345"), # Forced version bump (["--prerelease"], "1.1.0-rc.1"), (["--patch"], "1.1.1"), (["--minor"], "1.2.0"), (["--major"], "2.0.0"), # Forced version bump with --build-metadata ( ["--patch", "--build-metadata", "build.12345"], "1.1.1+build.12345", ), # Forced version bump with --as-prerelease (["--prerelease", "--as-prerelease"], "1.1.0-rc.1"), (["--patch", "--as-prerelease"], "1.1.1-rc.1"), (["--minor", "--as-prerelease"], "1.2.0-rc.1"), (["--major", "--as-prerelease"], "2.0.0-rc.1"), # Forced version bump with --as-prerelease and modified --prerelease-token ( ["--patch", "--as-prerelease", "--prerelease-token", "beta"], "1.1.1-beta.1", ), # Forced version bump with --as-prerelease and modified --prerelease-token # and --build-metadata ( [ "--patch", "--as-prerelease", "--prerelease-token", "beta", "--build-metadata", "build.12345", ], "1.1.1-beta.1+build.12345", ), ], repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__: [ # New build-metadata forces a new release (["--build-metadata", "build.12345"], "1.2.0-alpha.2+build.12345"), # Forced version bump (["--prerelease"], "1.2.0-alpha.3"), (["--patch"], "1.2.1"), (["--minor"], "1.3.0"), (["--major"], "2.0.0"), # Forced version bump with --build-metadata ( ["--patch", "--build-metadata", 
"build.12345"], "1.2.1+build.12345", ), # Forced version bump with --as-prerelease (["--prerelease", "--as-prerelease"], "1.2.0-alpha.3"), (["--patch", "--as-prerelease"], "1.2.1-alpha.1"), (["--minor", "--as-prerelease"], "1.3.0-alpha.1"), (["--major", "--as-prerelease"], "2.0.0-alpha.1"), # Forced version bump with --as-prerelease and modified --prerelease-token ( ["--patch", "--as-prerelease", "--prerelease-token", "beta"], "1.2.1-beta.1", ), # Forced version bump with --as-prerelease and modified --prerelease-token # and --build-metadata ( [ "--patch", "--as-prerelease", "--prerelease-token", "beta", "--build-metadata", "build.12345", ], "1.2.1-beta.1+build.12345", ), ], repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits.__name__: [ # New build-metadata forces a new release (["--build-metadata", "build.12345"], "1.1.0+build.12345"), # Forced version bump (["--prerelease"], "1.1.0-rc.3"), (["--patch"], "1.1.1"), (["--minor"], "1.2.0"), (["--major"], "2.0.0"), # Forced version bump with --build-metadata ( ["--patch", "--build-metadata", "build.12345"], "1.1.1+build.12345", ), # Forced version bump with --as-prerelease (["--prerelease", "--as-prerelease"], "1.1.0-rc.3"), (["--patch", "--as-prerelease"], "1.1.1-rc.1"), (["--minor", "--as-prerelease"], "1.2.0-rc.1"), (["--major", "--as-prerelease"], "2.0.0-rc.1"), # Forced version bump with --as-prerelease and modified --prerelease-token ( ["--patch", "--as-prerelease", "--prerelease-token", "beta"], "1.1.1-beta.1", ), # Forced version bump with --as-prerelease and modified --prerelease-token # and --build-metadata ( [ "--patch", "--as-prerelease", "--prerelease-token", "beta", "--build-metadata", "build.12345", ], "1.1.1-beta.1+build.12345", ), ], }.items() for (cli_args, expected_stdout) in values ], ], ) def test_version_force_level( repo_result: BuiltRepoResult, cli_args: list[str], next_release_version: str, example_project_dir: ExProjectDir, example_pyproject_toml: Path, run_cli: RunCliFn, 
mocked_git_push: MagicMock, post_mocker: Mocker, pyproject_toml_file: Path, changelog_md_file: Path, get_expected_version_py_file_content: GetExpectedVersionPyFileContentFn, ): # Force clean directory state before test (needed for the repo_w_no_tags) repo = repo_result["repo"] repo.git.reset("HEAD", hard=True) version_file = example_project_dir.joinpath( "src", EXAMPLE_PROJECT_NAME, "_version.py" ) expected_changed_files = sorted( [ str(changelog_md_file), str(pyproject_toml_file), str(version_file.relative_to(example_project_dir)), ] ) expected_version_py_content = get_expected_version_py_file_content( next_release_version ) # Setup: take measurement before running the version command head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} pyproject_toml_before = tomlkit.loads( example_pyproject_toml.read_text(encoding="utf-8") ) # Modify the pyproject.toml to remove the version so we can compare it later pyproject_toml_before.get("tool", {}).get("poetry").pop("version") # type: ignore[attr-defined] # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *cli_args] result = run_cli(cli_cmd[1:]) # take measurement after running the version command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) differing_files = [ # Make sure filepath uses os specific path separators str(Path(file)) for file in str(repo.git.diff("HEAD", "HEAD~1", name_only=True)).splitlines() ] pyproject_toml_after = tomlkit.loads( example_pyproject_toml.read_text(encoding="utf-8") ) pyproj_version_after = ( pyproject_toml_after.get("tool", {}).get("poetry", {}).pop("version") ) # Load python module for reading the version (ensures the file is valid) actual_version_py_content = version_file.read_text() # Evaluate (normal release actions should have occurred when forced patch bump) assert_successful_exit_code(result, cli_cmd) # A commit has been made assert 
[head_sha_before] == [head.hexsha for head in head_after.parents] assert len(tags_set_difference) == 1 # A tag has been created assert f"v{next_release_version}" in tags_set_difference assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred # Changelog already reflects changes this should introduce assert expected_changed_files == differing_files # Compare pyproject.toml assert pyproject_toml_before == pyproject_toml_after assert next_release_version == pyproj_version_after # Compare _version.py assert expected_version_py_content == actual_version_py_content # Verify content is parsable & importable dynamic_version = dynamic_python_import( version_file, f"{EXAMPLE_PROJECT_NAME}._version" ).__version__ assert next_release_version == dynamic_version # NOTE: There is a bit of a corner-case where if we are not doing a # prerelease, we will get a full version based on already-released commits. # So for example, commits that wouldn't trigger a release on a prerelease branch # won't trigger a release if prerelease=true; however, when commits included in a # prerelease branch are merged to a release branch, prerelease=False - so a feat commit # which previously triggered a prerelease on a branch will subsequently trigger a full # release when merged to a full release branch where prerelease=False. # For this reason a couple of these test cases predict a new version even when the # commits being added here don't induce a version bump. 
@pytest.mark.parametrize( str.join( ", ", [ "repo_result", "commit_messages", "prerelease", "prerelease_token", "next_release_version", "branch_name", ], ), xdist_sort_hack( [ ( # Default case should be a minor bump since last full release was 1.1.1 # last tag is a prerelease 1.2.0-rc.2 lazy_fixture( repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__ ), lazy_fixture(conventional_minor_commits.__name__), False, "alpha", "1.2.0", "main", ), *[ pytest.param( lazy_fixture(repo_fixture_name), [] if commit_messages is None else lazy_fixture(commit_messages), prerelease, prerelease_token, expected_new_version, "main" if branch_name is None else branch_name, marks=pytest.mark.comprehensive, ) for (repo_fixture_name, prerelease_token), values in { # Latest version for repo_with_git_flow is currently 1.2.0-alpha.2 # The last full release version was 1.1.1, so it's had a minor # prerelease ( repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__, "alpha", ): [ (conventional_patch_commits.__name__, False, "1.1.2", None), ( conventional_patch_commits.__name__, True, "1.1.2-alpha.1", None, ), ( conventional_minor_commits.__name__, True, "1.2.0-alpha.3", "feat/feature-4", # branch ), (conventional_major_commits.__name__, False, "2.0.0", None), ( conventional_major_commits.__name__, True, "2.0.0-alpha.1", None, ), ], # Latest version for repo_with_git_flow_and_release_channels is # currently 1.1.0 ( repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits.__name__, "alpha", ): [ (conventional_patch_commits.__name__, False, "1.1.1", None), ( conventional_patch_commits.__name__, True, "1.1.1-alpha.1", None, ), (conventional_minor_commits.__name__, False, "1.2.0", None), ( conventional_minor_commits.__name__, True, "1.2.0-alpha.1", None, ), (conventional_major_commits.__name__, False, "2.0.0", None), ( conventional_major_commits.__name__, True, "2.0.0-alpha.1", None, ), ], }.items() for ( commit_messages, prerelease, expected_new_version, 
branch_name, ) in values # type: ignore[attr-defined] ], ] ), ) # TODO: add a github flow test case def test_version_next_greater_than_version_one_conventional( repo_result: BuiltRepoResult, commit_messages: list[str], prerelease: bool, prerelease_token: str, next_release_version: str, branch_name: str, run_cli: RunCliFn, file_in_repo: str, mocked_git_push: MagicMock, post_mocker: Mocker, stable_now_date: GetStableDateNowFn, ): repo = repo_result["repo"] # setup: select the branch we desire for the next bump if repo.active_branch.name != branch_name: repo.heads[branch_name].checkout() # setup: apply commits to the repo stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) for commit_message in commit_messages or []: add_text_to_file(repo, file_in_repo) repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen)) # Setup: take measurement before running the version command head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Derive the cli arguments based on parameter input prerelease_args = list( filter( None, [ "--as-prerelease" if prerelease else "", *(["--prerelease-token", prerelease_token] if prerelease else []), ], ) ) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args] result = run_cli(cli_cmd[1:]) # take measurement after running the version command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) # Evaluate (normal release actions should have occurred when forced patch bump) assert_successful_exit_code(result, cli_cmd) # A commit has been made (regardless of precommit) assert [head_sha_before] == [head.hexsha for head in head_after.parents] assert len(tags_set_difference) == 1 # A tag has been created assert f"v{next_release_version}" in tags_set_difference assert 
mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred @pytest.mark.parametrize( str.join( ", ", [ "repo_result", "commit_messages", "prerelease", "prerelease_token", "next_release_version", "branch_name", ], ), xdist_sort_hack( [ *[ pytest.param( lazy_fixture(repo_fixture_name), [] if commit_messages is None else lazy_fixture(commit_messages), prerelease, prerelease_token, expected_new_version, "main" if branch_name is None else branch_name, marks=pytest.mark.comprehensive, ) for (repo_fixture_name, prerelease_token), values in { # Latest version for repo_with_git_flow is currently 1.2.0-alpha.2 # The last full release version was 1.1.1, so it's had a minor # prerelease ( repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__, "alpha", ): [ *( (commits, True, "1.2.0-alpha.2", "feat/feature-4") for commits in ( None, conventional_chore_commits.__name__, ) ), *( (commits, False, "1.1.1", None) for commits in ( None, conventional_chore_commits.__name__, ) ), ], # Latest version for repo_with_git_flow_and_release_channels is # currently 1.1.0 ( repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits.__name__, "alpha", ): [ *( (commits, prerelease, "1.1.0", None) for prerelease in (True, False) for commits in ( None, conventional_chore_commits.__name__, ) ), ], }.items() for ( commit_messages, prerelease, expected_new_version, branch_name, ) in values # type: ignore[attr-defined] ], ] ), ) def test_version_next_greater_than_version_one_no_bump_conventional( repo_result: BuiltRepoResult, commit_messages: list[str], prerelease: bool, prerelease_token: str, next_release_version: str, branch_name: str, run_cli: RunCliFn, file_in_repo: str, mocked_git_push: MagicMock, post_mocker: Mocker, stable_now_date: GetStableDateNowFn, ): repo = repo_result["repo"] # setup: select the branch we desire for the next bump if repo.active_branch.name != branch_name: repo.heads[branch_name].checkout() 
# setup: apply commits to the repo stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) for commit_message in commit_messages or []: add_text_to_file(repo, file_in_repo) repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen)) # Setup: take measurement before running the version command head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Derive the cli arguments based on parameter input prerelease_args = list( filter( None, [ "--as-prerelease" if prerelease else "", *(["--prerelease-token", prerelease_token] if prerelease else []), ], ) ) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args] result = run_cli(cli_cmd[1:]) # take measurement after running the version command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) # Evaluate (no release actions should have occurred when no bump) assert_successful_exit_code(result, cli_cmd) assert f"{next_release_version}\n" == result.stdout # No commit has been made assert head_sha_before == head_after.hexsha assert len(tags_set_difference) == 0 # No tag created assert mocked_git_push.call_count == 0 # no git push of tag or commit assert post_mocker.call_count == 0 # no vcs release @pytest.mark.parametrize( str.join( ", ", [ "repo_result", "commit_messages", "prerelease", "prerelease_token", "next_release_version", "branch_name", ], ), xdist_sort_hack( [ pytest.param( lazy_fixture(repo_fixture_name), [] if commit_messages is None else lazy_fixture(commit_messages), prerelease, prerelease_token, expected_new_version, "main" if branch_name is None else branch_name, marks=pytest.mark.comprehensive, ) for (repo_fixture_name, prerelease_token), values in { # Latest version for repo_with_git_flow is currently 1.2.0-alpha.2 # The last full release 
version was 1.1.1, so it's had a minor # prerelease ( repo_w_git_flow_w_alpha_prereleases_n_emoji_commits.__name__, "alpha", ): [ (emoji_patch_commits.__name__, False, "1.1.2", None), ( emoji_patch_commits.__name__, True, "1.1.2-alpha.1", None, ), ( emoji_minor_commits.__name__, False, "1.2.0", None, ), ( emoji_minor_commits.__name__, True, "1.2.0-alpha.3", "feat/feature-4", # branch ), (emoji_major_commits.__name__, False, "2.0.0", None), ( emoji_major_commits.__name__, True, "2.0.0-alpha.1", None, ), ], # Latest version for repo_with_git_flow_and_release_channels is # currently 1.1.0 ( repo_w_git_flow_w_rc_n_alpha_prereleases_n_emoji_commits.__name__, "alpha", ): [ (emoji_patch_commits.__name__, False, "1.1.1", None), ( emoji_patch_commits.__name__, True, "1.1.1-alpha.1", None, ), (emoji_minor_commits.__name__, False, "1.2.0", None), ( emoji_minor_commits.__name__, True, "1.2.0-alpha.1", None, ), (emoji_major_commits.__name__, False, "2.0.0", None), ( emoji_major_commits.__name__, True, "2.0.0-alpha.1", None, ), ], }.items() for ( commit_messages, prerelease, expected_new_version, branch_name, ) in values # type: ignore[attr-defined] ] ), ) def test_version_next_greater_than_version_one_emoji( repo_result: BuiltRepoResult, commit_messages: list[str], prerelease: bool, prerelease_token: str, next_release_version: str, branch_name: str, run_cli: RunCliFn, file_in_repo: str, mocked_git_push: MagicMock, post_mocker: Mocker, stable_now_date: GetStableDateNowFn, ): repo = repo_result["repo"] # setup: select the branch we desire for the next bump if repo.active_branch.name != branch_name: repo.heads[branch_name].checkout() # setup: apply commits to the repo stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) for commit_message in commit_messages or []: add_text_to_file(repo, file_in_repo) repo.git.commit(m=commit_message, a=True, 
date=next(commit_timestamp_gen)) # Setup: take measurement before running the version command head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Derive the cli arguments based on parameter input prerelease_args = list( filter( None, [ "--as-prerelease" if prerelease else "", *(["--prerelease-token", prerelease_token] if prerelease else []), ], ) ) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args] result = run_cli(cli_cmd[1:]) # take measurement after running the version command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) # Evaluate (normal release actions should have occurred when forced patch bump) assert_successful_exit_code(result, cli_cmd) # A commit has been made (regardless of precommit) assert [head_sha_before] == [head.hexsha for head in head_after.parents] assert len(tags_set_difference) == 1 # A tag has been created assert f"v{next_release_version}" in tags_set_difference assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred @pytest.mark.parametrize( str.join( ", ", [ "repo_result", "commit_messages", "prerelease", "prerelease_token", "next_release_version", "branch_name", ], ), xdist_sort_hack( [ *[ pytest.param( lazy_fixture(repo_fixture_name), [] if commit_messages is None else lazy_fixture(commit_messages), prerelease, prerelease_token, expected_new_version, "main" if branch_name is None else branch_name, marks=pytest.mark.comprehensive, ) for (repo_fixture_name, prerelease_token), values in { # Latest version for repo_with_git_flow is currently 1.2.0-alpha.2 # The last full release version was 1.1.1, so it's had a minor # prerelease ( repo_w_git_flow_w_alpha_prereleases_n_emoji_commits.__name__, "alpha", ): [ *( (commits, True, "1.2.0-alpha.2", "feat/feature-4") for commits in ( None, 
emoji_chore_commits.__name__, ) ), *( (commits, False, "1.1.1", None) for commits in ( None, emoji_chore_commits.__name__, ) ), ], # Latest version for repo_with_git_flow_and_release_channels is # currently 1.1.0 ( repo_w_git_flow_w_rc_n_alpha_prereleases_n_emoji_commits.__name__, "alpha", ): [ *( (commits, prerelease, "1.1.0", None) for prerelease in (True, False) for commits in ( None, emoji_chore_commits.__name__, ) ), ], }.items() for ( commit_messages, prerelease, expected_new_version, branch_name, ) in values # type: ignore[attr-defined] ], ] ), ) def test_version_next_greater_than_version_one_no_bump_emoji( repo_result: BuiltRepoResult, commit_messages: list[str], prerelease: bool, prerelease_token: str, next_release_version: str, branch_name: str, run_cli: RunCliFn, file_in_repo: str, mocked_git_push: MagicMock, post_mocker: Mocker, stable_now_date: GetStableDateNowFn, ): repo = repo_result["repo"] # setup: select the branch we desire for the next bump if repo.active_branch.name != branch_name: repo.heads[branch_name].checkout() # setup: apply commits to the repo stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) for commit_message in commit_messages or []: add_text_to_file(repo, file_in_repo) repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen)) # Setup: take measurement before running the version command head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Derive the cli arguments based on parameter input prerelease_args = list( filter( None, [ "--as-prerelease" if prerelease else "", *(["--prerelease-token", prerelease_token] if prerelease else []), ], ) ) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args] result = run_cli(cli_cmd[1:]) # take measurement after running the version command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} 
tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) # Evaluate (normal release actions should have occurred when forced patch bump) assert_successful_exit_code(result, cli_cmd) assert f"{next_release_version}\n" == result.stdout # No commit has been made assert head_sha_before == head_after.hexsha assert len(tags_set_difference) == 0 # No tag created assert mocked_git_push.call_count == 0 # no git push of tag or commit assert post_mocker.call_count == 0 # no vcs release @pytest.mark.parametrize( str.join( ", ", [ "repo_result", "commit_messages", "prerelease", "prerelease_token", "next_release_version", "branch_name", ], ), xdist_sort_hack( [ pytest.param( lazy_fixture(repo_fixture_name), [] if commit_messages is None else lazy_fixture(commit_messages), prerelease, prerelease_token, expected_new_version, "main" if branch_name is None else branch_name, marks=pytest.mark.comprehensive, ) for (repo_fixture_name, prerelease_token), values in { # Latest version for repo_with_git_flow is currently 1.2.0-alpha.2 # The last full release version was 1.1.1, so it's had a minor # prerelease ( repo_w_git_flow_w_alpha_prereleases_n_scipy_commits.__name__, "alpha", ): [ (scipy_patch_commits.__name__, False, "1.1.2", None), ( scipy_patch_commits.__name__, True, "1.1.2-alpha.1", None, ), ( scipy_minor_commits.__name__, False, "1.2.0", None, ), ( scipy_minor_commits.__name__, True, "1.2.0-alpha.3", "feat/feature-4", # branch ), (scipy_major_commits.__name__, False, "2.0.0", None), ( scipy_major_commits.__name__, True, "2.0.0-alpha.1", None, ), ], # Latest version for repo_with_git_flow_and_release_channels is # currently 1.1.0 ( repo_w_git_flow_w_rc_n_alpha_prereleases_n_scipy_commits.__name__, "alpha", ): [ (scipy_patch_commits.__name__, False, "1.1.1", None), ( scipy_patch_commits.__name__, True, "1.1.1-alpha.1", None, ), (scipy_minor_commits.__name__, False, "1.2.0", None), ( scipy_minor_commits.__name__, True, "1.2.0-alpha.1", None, ), 
(scipy_major_commits.__name__, False, "2.0.0", None), ( scipy_major_commits.__name__, True, "2.0.0-alpha.1", None, ), ], }.items() for ( commit_messages, prerelease, expected_new_version, branch_name, ) in values # type: ignore[attr-defined] ], ), ) def test_version_next_greater_than_version_one_scipy( repo_result: BuiltRepoResult, commit_messages: list[str], prerelease: bool, prerelease_token: str, next_release_version: str, branch_name: str, run_cli: RunCliFn, file_in_repo: str, mocked_git_push: MagicMock, post_mocker: Mocker, stable_now_date: GetStableDateNowFn, ): repo = repo_result["repo"] # setup: select the branch we desire for the next bump if repo.active_branch.name != branch_name: repo.heads[branch_name].checkout() # setup: apply commits to the repo stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) for commit_message in commit_messages or []: add_text_to_file(repo, file_in_repo) repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen)) # Setup: take measurement before running the version command head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Derive the cli arguments based on parameter input prerelease_args = list( filter( None, [ "--as-prerelease" if prerelease else "", *(["--prerelease-token", prerelease_token] if prerelease else []), ], ) ) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args] result = run_cli(cli_cmd[1:]) # take measurement after running the version command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) # Evaluate (normal release actions should have occurred when forced patch bump) assert_successful_exit_code(result, cli_cmd) # A commit has been made (regardless of precommit) assert [head_sha_before] == [head.hexsha for head in 
head_after.parents] assert len(tags_set_difference) == 1 # A tag has been created assert f"v{next_release_version}" in tags_set_difference assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred @pytest.mark.parametrize( str.join( ", ", [ "repo_result", "commit_messages", "prerelease", "prerelease_token", "next_release_version", "branch_name", ], ), xdist_sort_hack( [ *[ pytest.param( lazy_fixture(repo_fixture_name), [] if commit_messages is None else lazy_fixture(commit_messages), prerelease, prerelease_token, expected_new_version, "main" if branch_name is None else branch_name, marks=pytest.mark.comprehensive, ) for (repo_fixture_name, prerelease_token), values in { # Latest version for repo_with_git_flow is currently 1.2.0-alpha.2 # The last full release version was 1.1.1, so it's had a minor # prerelease ( repo_w_git_flow_w_alpha_prereleases_n_scipy_commits.__name__, "alpha", ): [ *( (commits, True, "1.2.0-alpha.2", "feat/feature-4") for commits in ( None, scipy_chore_commits.__name__, ) ), *( (commits, False, "1.1.1", None) for commits in ( None, scipy_chore_commits.__name__, ) ), ], # Latest version for repo_with_git_flow_and_release_channels is # currently 1.1.0 ( repo_w_git_flow_w_rc_n_alpha_prereleases_n_scipy_commits.__name__, "alpha", ): [ *( (commits, prerelease, "1.1.0", None) for prerelease in (True, False) for commits in ( None, scipy_chore_commits.__name__, ) ), ], }.items() for ( commit_messages, prerelease, expected_new_version, branch_name, ) in values # type: ignore[attr-defined] ], ] ), ) def test_version_next_greater_than_version_one_no_bump_scipy( repo_result: BuiltRepoResult, commit_messages: list[str], prerelease: bool, prerelease_token: str, next_release_version: str, branch_name: str, run_cli: RunCliFn, file_in_repo: str, mocked_git_push: MagicMock, post_mocker: Mocker, stable_now_date: GetStableDateNowFn, ): repo = repo_result["repo"] # setup: select the branch 
we desire for the next bump if repo.active_branch.name != branch_name: repo.heads[branch_name].checkout() # setup: apply commits to the repo stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) for commit_message in commit_messages or []: add_text_to_file(repo, file_in_repo) repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen)) # Setup: take measurement before running the version command head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Derive the cli arguments based on parameter input prerelease_args = list( filter( None, [ "--as-prerelease" if prerelease else "", *(["--prerelease-token", prerelease_token] if prerelease else []), ], ) ) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args] result = run_cli(cli_cmd[1:]) # take measurement after running the version command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) # Evaluate (no release actions should have occurred when no bump) assert_successful_exit_code(result, cli_cmd) assert f"{next_release_version}\n" == result.stdout # No commit has been made assert head_sha_before == head_after.hexsha assert len(tags_set_difference) == 0 # No tag created assert mocked_git_push.call_count == 0 # no git push of tag or commit assert post_mocker.call_count == 0 # no vcs release # ============================================================================= # # Zero Dot version tests (ex. 
0.x.y versions) # ============================================================================= # @pytest.mark.parametrize( str.join( ", ", [ "repo_result", "commit_messages", "prerelease", "prerelease_token", "major_on_zero", "allow_zero_version", "next_release_version", "branch_name", ], ), xdist_sort_hack( [ ( # Latest version for repo_with_no_tags is currently 0.0.0 (default) # It's biggest change type is minor, so the next version should be 0.1.0 # Given the major_on_zero is False and the version is starting at 0.0.0, # the major level commits are limited to only causing a minor level bump lazy_fixture(repo_w_no_tags_conventional_commits.__name__), lazy_fixture(conventional_major_commits.__name__), False, "rc", False, True, "0.1.0", "main", ), *[ pytest.param( lazy_fixture(repo_fixture_name), commit_messages, prerelease, "rc" if prerelease_token is None else prerelease_token, major_on_zero, allow_zero_version, next_release_version, "main" if branch_name is None else branch_name, marks=pytest.mark.comprehensive, ) for (repo_fixture_name, prerelease_token), values in { # Latest version for repo_with_no_tags is currently 0.0.0 (default) # It's biggest change type is minor, so the next version should be 0.1.0 ( repo_w_no_tags_conventional_commits.__name__, None, ): [ *( # when prerelease is False, & major_on_zero is False & # allow_zero_version is True, the version should be # 0.1.0, with the given commits (commits, False, False, True, "0.1.0", None) for commits in ( # Even when this test does not change anything, the base modification # will be a minor change and thus the version will be bumped to 0.1.0 None, # Non version bumping commits are absorbed into the previously detected minor bump lazy_fixture(conventional_chore_commits.__name__), # Patch commits are absorbed into the previously detected minor bump lazy_fixture(conventional_patch_commits.__name__), # Minor level commits are absorbed into the previously detected minor bump 
lazy_fixture(conventional_minor_commits.__name__), # Given the major_on_zero is False and the version is starting at 0.0.0, # the major level commits are limited to only causing a minor level bump # lazy_fixture(conventional_major_commits.__name__), # used as default ) ), # when prerelease is False, & major_on_zero is False, & allow_zero_version is True, # the version should only be minor bumped when provided major commits because # of the major_on_zero value ( lazy_fixture(conventional_major_commits.__name__), False, False, True, "0.1.0", None, ), # when prerelease is False, & major_on_zero is True & allow_zero_version is True, # the version should be major bumped when provided major commits because # of the major_on_zero value ( lazy_fixture(conventional_major_commits.__name__), False, True, True, "1.0.0", None, ), *( # when prerelease is False, & allow_zero_version is False, the version should be # 1.0.0, across the board because 0 is not a valid major version. # major_on_zero is ignored as it is not relevant but tested for completeness (commits, False, major_on_zero, False, "1.0.0", None) for major_on_zero in (True, False) for commits in ( None, lazy_fixture(conventional_chore_commits.__name__), lazy_fixture(conventional_patch_commits.__name__), lazy_fixture(conventional_minor_commits.__name__), lazy_fixture(conventional_major_commits.__name__), ) ), ], # Latest version for repo_with_single_branch is currently 0.1.1 # Note repo_with_single_branch isn't modelled with prereleases ( repo_w_trunk_only_conventional_commits.__name__, None, ): [ *( # when prerelease must be False, and allow_zero_version is True, # the version is patch bumped because of the patch level commits # regardless of the major_on_zero value ( lazy_fixture(conventional_patch_commits.__name__), False, major_on_zero, True, "0.1.2", None, ) for major_on_zero in (True, False) ), *( # when prerelease must be False, and allow_zero_version is True, # the version is minor bumped because of the 
major_on_zero value=False (commits, False, False, True, "0.2.0", None) for commits in ( lazy_fixture(conventional_minor_commits.__name__), lazy_fixture(conventional_major_commits.__name__), ) ), # when prerelease must be False, and allow_zero_version is True, # but the major_on_zero is True, then when a major level commit is given, # the version should be bumped to the next major version ( lazy_fixture(conventional_major_commits.__name__), False, True, True, "1.0.0", None, ), *( # when prerelease must be False, & allow_zero_version is False, the version should be # 1.0.0, with any change regardless of major_on_zero (commits, False, major_on_zero, False, "1.0.0", None) for major_on_zero in (True, False) for commits in ( None, lazy_fixture(conventional_chore_commits.__name__), lazy_fixture(conventional_patch_commits.__name__), lazy_fixture(conventional_minor_commits.__name__), lazy_fixture(conventional_major_commits.__name__), ) ), ], # Latest version for repo_with_single_branch_and_prereleases is # currently 0.2.0 ( repo_w_trunk_only_n_prereleases_conventional_commits.__name__, None, ): [ # when allow_zero_version is True, # prerelease is False, & major_on_zero is False, the version should be # patch bumped as a prerelease version, when given patch level commits ( lazy_fixture(conventional_patch_commits.__name__), True, False, True, "0.2.1-rc.1", None, ), # when allow_zero_version is True, # prerelease is False, & major_on_zero is False, the version should be # patch bumped, when given patch level commits ( lazy_fixture(conventional_patch_commits.__name__), False, False, True, "0.2.1", None, ), *( # when allow_zero_version is True, # prerelease is True, & major_on_zero is False, the version should be # minor bumped as a prerelease version, when given commits of a minor or major level (commits, True, False, True, "0.3.0-rc.1", None) for commits in ( lazy_fixture(conventional_minor_commits.__name__), lazy_fixture(conventional_major_commits.__name__), ) ), *( # when 
allow_zero_version is True, prerelease is True, & major_on_zero # is False, the version should be minor bumped, when given commits of a # minor or major level because major_on_zero = False (commits, False, False, True, "0.3.0", None) for commits in ( lazy_fixture(conventional_minor_commits.__name__), lazy_fixture(conventional_major_commits.__name__), ) ), # when prerelease is True, & major_on_zero is True, and allow_zero_version # is True, the version should be bumped to 1.0.0 as a prerelease version, when # given major level commits ( lazy_fixture(conventional_major_commits.__name__), True, True, True, "1.0.0-rc.1", None, ), # when prerelease is False, & major_on_zero is True, and allow_zero_version # is True, the version should be bumped to 1.0.0, when given major level commits ( lazy_fixture(conventional_major_commits.__name__), False, True, True, "1.0.0", None, ), *( # when prerelease is True, & allow_zero_version is False, the version should be # bumped to 1.0.0 as a prerelease version, when given any/none commits # because 0.x is no longer a valid version regardless of the major_on_zero value (commits, True, major_on_zero, False, "1.0.0-rc.1", None) for major_on_zero in (True, False) for commits in ( None, lazy_fixture(conventional_chore_commits.__name__), lazy_fixture(conventional_patch_commits.__name__), lazy_fixture(conventional_minor_commits.__name__), lazy_fixture(conventional_major_commits.__name__), ) ), *( # when prerelease is True, & allow_zero_version is False, the version should be # bumped to 1.0.0, when given any/none commits # because 0.x is no longer a valid version regardless of the major_on_zero value (commits, False, major_on_zero, False, "1.0.0", None) for major_on_zero in (True, False) for commits in ( lazy_fixture(conventional_patch_commits.__name__), lazy_fixture(conventional_minor_commits.__name__), lazy_fixture(conventional_major_commits.__name__), ) ), ], }.items() for ( commit_messages, prerelease, major_on_zero, allow_zero_version, 
next_release_version, branch_name, ) in values # type: ignore[attr-defined] ], ], ), ) def test_version_next_w_zero_dot_versions_conventional( repo_result: BuiltRepoResult, commit_messages: list[str], prerelease: bool, prerelease_token: str, next_release_version: str, branch_name: str, major_on_zero: bool, allow_zero_version: bool, run_cli: RunCliFn, file_in_repo: str, update_pyproject_toml: UpdatePyprojectTomlFn, mocked_git_push: MagicMock, post_mocker: Mocker, stable_now_date: GetStableDateNowFn, ): repo = repo_result["repo"] # setup: select the branch we desire for the next bump if repo.active_branch.name != branch_name: repo.heads[branch_name].checkout() # setup: update pyproject.toml with the necessary settings update_pyproject_toml( "tool.semantic_release.allow_zero_version", allow_zero_version ) update_pyproject_toml("tool.semantic_release.major_on_zero", major_on_zero) # setup: apply commits to the repo stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) for commit_message in commit_messages or []: add_text_to_file(repo, file_in_repo) repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen)) # Setup: take measurement before running the version command head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Derive the cli arguments based on parameter input prerelease_args = list( filter( None, [ "--as-prerelease" if prerelease else "", *(["--prerelease-token", prerelease_token] if prerelease else []), ], ) ) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args] result = run_cli(cli_cmd[1:]) # take measurement after running the version command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) # Evaluate (normal release actions should have occurred when forced patch bump) 
@pytest.mark.parametrize(
    str.join(
        ", ",
        [
            "repo_result",
            "commit_messages",
            "prerelease",
            "prerelease_token",
            "major_on_zero",
            "allow_zero_version",
            "next_release_version",
            "branch_name",
        ],
    ),
    xdist_sort_hack(
        [
            *[
                pytest.param(
                    lazy_fixture(repo_fixture_name),
                    commit_messages,
                    prerelease,
                    "rc" if prerelease_token is None else prerelease_token,
                    major_on_zero,
                    allow_zero_version,
                    next_release_version,
                    "main" if branch_name is None else branch_name,
                    marks=pytest.mark.comprehensive,
                )
                for (repo_fixture_name, prerelease_token), values in {
                    # Latest version for repo_with_single_branch is currently 0.1.1
                    # Note repo_with_single_branch isn't modelled with prereleases
                    (
                        repo_w_trunk_only_conventional_commits.__name__,
                        None,
                    ): [
                        *(
                            # when prerelease must be False, and allow_zero_version is True,
                            # the version is not bumped because of non valuable changes regardless
                            # of the major_on_zero value
                            (commits, False, major_on_zero, True, "0.1.1", None)
                            for major_on_zero in (True, False)
                            for commits in (
                                None,
                                lazy_fixture(conventional_chore_commits.__name__),
                            )
                        ),
                    ],
                    # Latest version for repo_with_single_branch_and_prereleases is
                    # currently 0.2.0
                    (
                        repo_w_trunk_only_n_prereleases_conventional_commits.__name__,
                        None,
                    ): [
                        *(
                            # when allow_zero_version is True, the version is not bumped
                            # regardless of prerelease and major_on_zero values when given
                            # non valuable changes
                            (commits, prerelease, major_on_zero, True, "0.2.0", None)
                            for prerelease in (True, False)
                            for major_on_zero in (True, False)
                            for commits in (
                                None,
                                lazy_fixture(conventional_chore_commits.__name__),
                            )
                        ),
                    ],
                }.items()
                for (
                    commit_messages,
                    prerelease,
                    major_on_zero,
                    allow_zero_version,
                    next_release_version,
                    branch_name,
                ) in values  # type: ignore[attr-defined]
            ],
        ],
    ),
)
def test_version_next_w_zero_dot_versions_no_bump_conventional(
    repo_result: BuiltRepoResult,
    commit_messages: list[str],
    prerelease: bool,
    prerelease_token: str,
    next_release_version: str,
    branch_name: str,
    major_on_zero: bool,
    allow_zero_version: bool,
    run_cli: RunCliFn,
    file_in_repo: str,
    update_pyproject_toml: UpdatePyprojectTomlFn,
    mocked_git_push: MagicMock,
    post_mocker: Mocker,
    stable_now_date: GetStableDateNowFn,
):
    """
    Given a 0.x repo using the conventional commit parser and only
    non-bump-worthy (chore/no) commits, `semantic-release version` must be a
    no-op: it prints the current version on stdout, makes no commit, creates
    no tag, pushes nothing, and posts no VCS release.
    """
    repo = repo_result["repo"]

    # setup: select the branch we desire for the next bump
    if repo.active_branch.name != branch_name:
        repo.heads[branch_name].checkout()

    # setup: update pyproject.toml with the necessary settings
    update_pyproject_toml(
        "tool.semantic_release.allow_zero_version", allow_zero_version
    )
    update_pyproject_toml("tool.semantic_release.major_on_zero", major_on_zero)

    # setup: apply commits to the repo
    # NOTE: each commit gets a strictly increasing timestamp (1s apart) so the
    # resulting history ordering is deterministic for the test run
    stable_now_datetime = stable_now_date()
    commit_timestamp_gen = (
        (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds")
        for i in count(step=1)
    )

    for commit_message in commit_messages or []:
        add_text_to_file(repo, file_in_repo)
        repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen))

    # Setup: take measurement before running the version command
    head_sha_before = repo.head.commit.hexsha
    tags_before = {tag.name for tag in repo.tags}

    # Derive the cli arguments based on parameter input
    prerelease_args = list(
        filter(
            None,
            [
                "--as-prerelease" if prerelease else "",
                *(["--prerelease-token", prerelease_token] if prerelease else []),
            ],
        )
    )

    # Act
    cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args]
    result = run_cli(cli_cmd[1:])

    # take measurement after running the version command
    head_after = repo.head.commit
    tags_after = {tag.name for tag in repo.tags}
    tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before))

    # Evaluate (no release actions should have occurred when no bump)
    assert_successful_exit_code(result, cli_cmd)
    assert f"{next_release_version}\n" == result.stdout

    # No commit has been made
    assert head_sha_before == head_after.hexsha
    assert len(tags_set_difference) == 0  # No tag created
    assert mocked_git_push.call_count == 0  # no git push of tag or commit
    assert post_mocker.call_count == 0  # no vcs release
@pytest.mark.parametrize(
    str.join(
        ", ",
        [
            "repo_result",
            "commit_messages",
            "prerelease",
            "prerelease_token",
            "major_on_zero",
            "allow_zero_version",
            "next_release_version",
            "branch_name",
        ],
    ),
    xdist_sort_hack(
        [
            pytest.param(
                lazy_fixture(repo_fixture_name),
                commit_messages,
                prerelease,
                "rc" if prerelease_token is None else prerelease_token,
                major_on_zero,
                allow_zero_version,
                next_release_version,
                "main" if branch_name is None else branch_name,
                marks=pytest.mark.comprehensive,
            )
            for (repo_fixture_name, prerelease_token), values in {
                # Latest version for repo_with_no_tags is currently 0.0.0 (default)
                # It's biggest change type is minor, so the next version should be 0.1.0
                (
                    repo_w_no_tags_emoji_commits.__name__,
                    None,
                ): [
                    *(
                        # when prerelease is False, & major_on_zero is False &
                        # allow_zero_version is True, the version should be
                        # 0.1.0, with the given commits
                        (commits, False, False, True, "0.1.0", None)
                        for commits in (
                            # Even when this test does not change anything, the base modification
                            # will be a minor change and thus the version will be bumped to 0.1.0
                            None,
                            # Non version bumping commits are absorbed into the previously detected minor bump
                            lazy_fixture(emoji_chore_commits.__name__),
                            # Patch commits are absorbed into the previously detected minor bump
                            lazy_fixture(emoji_patch_commits.__name__),
                            # Minor level commits are absorbed into the previously detected minor bump
                            lazy_fixture(emoji_minor_commits.__name__),
                            # Given the major_on_zero is False and the version is starting at 0.0.0,
                            # the major level commits are limited to only causing a minor level bump
                            lazy_fixture(emoji_major_commits.__name__),
                        )
                    ),
                    # when prerelease is False, & major_on_zero is False, & allow_zero_version is True,
                    # the version should only be minor bumped when provided major commits because
                    # of the major_on_zero value
                    (
                        lazy_fixture(emoji_major_commits.__name__),
                        False,
                        False,
                        True,
                        "0.1.0",
                        None,
                    ),
                    # when prerelease is False, & major_on_zero is True & allow_zero_version is True,
                    # the version should be major bumped when provided major commits because
                    # of the major_on_zero value
                    (
                        lazy_fixture(emoji_major_commits.__name__),
                        False,
                        True,
                        True,
                        "1.0.0",
                        None,
                    ),
                    *(
                        # when prerelease is False, & allow_zero_version is False, the version should be
                        # 1.0.0, across the board because 0 is not a valid major version.
                        # major_on_zero is ignored as it is not relevant but tested for completeness
                        (commits, False, major_on_zero, False, "1.0.0", None)
                        for major_on_zero in (True, False)
                        for commits in (
                            None,
                            lazy_fixture(emoji_chore_commits.__name__),
                            lazy_fixture(emoji_patch_commits.__name__),
                            lazy_fixture(emoji_minor_commits.__name__),
                            lazy_fixture(emoji_major_commits.__name__),
                        )
                    ),
                ],
                # Latest version for repo_with_single_branch is currently 0.1.1
                # Note repo_with_single_branch isn't modelled with prereleases
                (
                    repo_w_trunk_only_emoji_commits.__name__,
                    None,
                ): [
                    *(
                        # when prerelease must be False, and allow_zero_version is True,
                        # the version is patch bumped because of the patch level commits
                        # regardless of the major_on_zero value
                        (
                            lazy_fixture(emoji_patch_commits.__name__),
                            False,
                            major_on_zero,
                            True,
                            "0.1.2",
                            None,
                        )
                        for major_on_zero in (True, False)
                    ),
                    *(
                        # when prerelease must be False, and allow_zero_version is True,
                        # the version is minor bumped because of the major_on_zero value=False
                        (commits, False, False, True, "0.2.0", None)
                        for commits in (
                            lazy_fixture(emoji_minor_commits.__name__),
                            lazy_fixture(emoji_major_commits.__name__),
                        )
                    ),
                    # when prerelease must be False, and allow_zero_version is True,
                    # but the major_on_zero is True, then when a major level commit is given,
                    # the version should be bumped to the next major version
                    (
                        lazy_fixture(emoji_major_commits.__name__),
                        False,
                        True,
                        True,
                        "1.0.0",
                        None,
                    ),
                    *(
                        # when prerelease must be False, & allow_zero_version is False, the version should be
                        # 1.0.0, with any change regardless of major_on_zero
                        (commits, False, major_on_zero, False, "1.0.0", None)
                        for major_on_zero in (True, False)
                        for commits in (
                            None,
                            lazy_fixture(emoji_chore_commits.__name__),
                            lazy_fixture(emoji_patch_commits.__name__),
                            lazy_fixture(emoji_minor_commits.__name__),
                            lazy_fixture(emoji_major_commits.__name__),
                        )
                    ),
                ],
                # Latest version for repo_with_single_branch_and_prereleases is
                # currently 0.2.0
                (
                    repo_w_trunk_only_n_prereleases_emoji_commits.__name__,
                    None,
                ): [
                    # when allow_zero_version is True,
                    # prerelease is False, & major_on_zero is False, the version should be
                    # patch bumped as a prerelease version, when given patch level commits
                    (
                        lazy_fixture(emoji_patch_commits.__name__),
                        True,
                        False,
                        True,
                        "0.2.1-rc.1",
                        None,
                    ),
                    # when allow_zero_version is True,
                    # prerelease is False, & major_on_zero is False, the version should be
                    # patch bumped, when given patch level commits
                    (
                        lazy_fixture(emoji_patch_commits.__name__),
                        False,
                        False,
                        True,
                        "0.2.1",
                        None,
                    ),
                    *(
                        # when allow_zero_version is True,
                        # prerelease is True, & major_on_zero is False, the version should be
                        # minor bumped as a prerelease version, when given commits of a minor or major level
                        (commits, True, False, True, "0.3.0-rc.1", None)
                        for commits in (
                            lazy_fixture(emoji_minor_commits.__name__),
                            lazy_fixture(emoji_major_commits.__name__),
                        )
                    ),
                    *(
                        # when allow_zero_version is True, prerelease is True, & major_on_zero
                        # is False, the version should be minor bumped, when given commits of a
                        # minor or major level because major_on_zero = False
                        (commits, False, False, True, "0.3.0", None)
                        for commits in (
                            lazy_fixture(emoji_minor_commits.__name__),
                            lazy_fixture(emoji_major_commits.__name__),
                        )
                    ),
                    # when prerelease is True, & major_on_zero is True, and allow_zero_version
                    # is True, the version should be bumped to 1.0.0 as a prerelease version, when
                    # given major level commits
                    (
                        lazy_fixture(emoji_major_commits.__name__),
                        True,
                        True,
                        True,
                        "1.0.0-rc.1",
                        None,
                    ),
                    # when prerelease is False, & major_on_zero is True, and allow_zero_version
                    # is True, the version should be bumped to 1.0.0, when given major level commits
                    (
                        lazy_fixture(emoji_major_commits.__name__),
                        False,
                        True,
                        True,
                        "1.0.0",
                        None,
                    ),
                    *(
                        # when prerelease is True, & allow_zero_version is False, the version should be
                        # bumped to 1.0.0 as a prerelease version, when given any/none commits
                        # because 0.x is no longer a valid version regardless of the major_on_zero value
                        (commits, True, major_on_zero, False, "1.0.0-rc.1", None)
                        for major_on_zero in (True, False)
                        for commits in (
                            None,
                            lazy_fixture(emoji_chore_commits.__name__),
                            lazy_fixture(emoji_patch_commits.__name__),
                            lazy_fixture(emoji_minor_commits.__name__),
                            lazy_fixture(emoji_major_commits.__name__),
                        )
                    ),
                    *(
                        # when prerelease is True, & allow_zero_version is False, the version should be
                        # bumped to 1.0.0, when given any/none commits
                        # because 0.x is no longer a valid version regardless of the major_on_zero value
                        (commits, False, major_on_zero, False, "1.0.0", None)
                        for major_on_zero in (True, False)
                        for commits in (
                            lazy_fixture(emoji_patch_commits.__name__),
                            lazy_fixture(emoji_minor_commits.__name__),
                            lazy_fixture(emoji_major_commits.__name__),
                        )
                    ),
                ],
            }.items()
            for (
                commit_messages,
                prerelease,
                major_on_zero,
                allow_zero_version,
                next_release_version,
                branch_name,
            ) in values  # type: ignore[attr-defined]
        ],
    ),
)
def test_version_next_w_zero_dot_versions_emoji(
    repo_result: BuiltRepoResult,
    commit_messages: list[str],
    prerelease: bool,
    prerelease_token: str,
    next_release_version: str,
    branch_name: str,
    major_on_zero: bool,
    allow_zero_version: bool,
    run_cli: RunCliFn,
    file_in_repo: str,
    update_pyproject_toml: UpdatePyprojectTomlFn,
    mocked_git_push: MagicMock,
    post_mocker: Mocker,
    stable_now_date: GetStableDateNowFn,
):
    """
    Given a 0.x repo using the emoji commit parser and bump-worthy commits,
    `semantic-release version` must perform a full release: one release
    commit, one new tag matching the expected next version, two pushes
    (commit + tag), and one VCS release request.
    """
    repo = repo_result["repo"]

    # setup: select the branch we desire for the next bump
    if repo.active_branch.name != branch_name:
        repo.heads[branch_name].checkout()

    # setup: update pyproject.toml with the necessary settings
    update_pyproject_toml(
        "tool.semantic_release.allow_zero_version", allow_zero_version
    )
    update_pyproject_toml("tool.semantic_release.major_on_zero", major_on_zero)

    # setup: apply commits to the repo
    # NOTE: each commit gets a strictly increasing timestamp (1s apart) so the
    # resulting history ordering is deterministic for the test run
    stable_now_datetime = stable_now_date()
    commit_timestamp_gen = (
        (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds")
        for i in count(step=1)
    )

    for commit_message in commit_messages or []:
        add_text_to_file(repo, file_in_repo)
        repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen))

    # Setup: take measurement before running the version command
    head_sha_before = repo.head.commit.hexsha
    tags_before = {tag.name for tag in repo.tags}

    # Derive the cli arguments based on parameter input
    prerelease_args = list(
        filter(
            None,
            [
                "--as-prerelease" if prerelease else "",
                *(["--prerelease-token", prerelease_token] if prerelease else []),
            ],
        )
    )

    # Act
    cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args]
    result = run_cli(cli_cmd[1:])

    # take measurement after running the version command
    head_after = repo.head.commit
    tags_after = {tag.name for tag in repo.tags}
    tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before))

    # Evaluate (normal release actions should have occurred when forced patch bump)
    assert_successful_exit_code(result, cli_cmd)

    # A commit has been made (regardless of precommit)
    assert [head_sha_before] == [head.hexsha for head in head_after.parents]
    assert len(tags_set_difference) == 1  # A tag has been created
    assert f"v{next_release_version}" in tags_set_difference
    assert mocked_git_push.call_count == 2  # 1 for commit, 1 for tag
    assert post_mocker.call_count == 1  # vcs release creation occurred
@pytest.mark.parametrize(
    str.join(
        ", ",
        [
            "repo_result",
            "commit_messages",
            "prerelease",
            "prerelease_token",
            "major_on_zero",
            "allow_zero_version",
            "next_release_version",
            "branch_name",
        ],
    ),
    xdist_sort_hack(
        [
            *[
                pytest.param(
                    lazy_fixture(repo_fixture_name),
                    commit_messages,
                    prerelease,
                    "rc" if prerelease_token is None else prerelease_token,
                    major_on_zero,
                    allow_zero_version,
                    next_release_version,
                    "main" if branch_name is None else branch_name,
                    marks=pytest.mark.comprehensive,
                )
                for (repo_fixture_name, prerelease_token), values in {
                    # Latest version for repo_with_single_branch is currently 0.1.1
                    # Note repo_with_single_branch isn't modelled with prereleases
                    (
                        repo_w_trunk_only_emoji_commits.__name__,
                        None,
                    ): [
                        *(
                            # when prerelease must be False, and allow_zero_version is True,
                            # the version is not bumped because of non valuable changes regardless
                            # of the major_on_zero value
                            (commits, False, major_on_zero, True, "0.1.1", None)
                            for major_on_zero in (True, False)
                            for commits in (
                                None,
                                lazy_fixture(emoji_chore_commits.__name__),
                            )
                        ),
                    ],
                    # Latest version for repo_with_single_branch_and_prereleases is
                    # currently 0.2.0
                    (
                        repo_w_trunk_only_n_prereleases_emoji_commits.__name__,
                        None,
                    ): [
                        *(
                            # when allow_zero_version is True, the version is not bumped
                            # regardless of prerelease and major_on_zero values when given
                            # non valuable changes
                            (commits, prerelease, major_on_zero, True, "0.2.0", None)
                            for prerelease in (True, False)
                            for major_on_zero in (True, False)
                            for commits in (
                                None,
                                lazy_fixture(emoji_chore_commits.__name__),
                            )
                        ),
                    ],
                }.items()
                for (
                    commit_messages,
                    prerelease,
                    major_on_zero,
                    allow_zero_version,
                    next_release_version,
                    branch_name,
                ) in values  # type: ignore[attr-defined]
            ],
        ],
    ),
)
def test_version_next_w_zero_dot_versions_no_bump_emoji(
    repo_result: BuiltRepoResult,
    commit_messages: list[str],
    prerelease: bool,
    prerelease_token: str,
    next_release_version: str,
    branch_name: str,
    major_on_zero: bool,
    allow_zero_version: bool,
    run_cli: RunCliFn,
    file_in_repo: str,
    update_pyproject_toml: UpdatePyprojectTomlFn,
    mocked_git_push: MagicMock,
    post_mocker: Mocker,
    stable_now_date: GetStableDateNowFn,
):
    """
    Given a 0.x repo using the emoji commit parser and only non-bump-worthy
    (chore/no) commits, `semantic-release version` must be a no-op: it prints
    the current version on stdout, makes no commit, creates no tag, pushes
    nothing, and posts no VCS release.
    """
    repo = repo_result["repo"]

    # setup: select the branch we desire for the next bump
    if repo.active_branch.name != branch_name:
        repo.heads[branch_name].checkout()

    # setup: update pyproject.toml with the necessary settings
    update_pyproject_toml(
        "tool.semantic_release.allow_zero_version", allow_zero_version
    )
    update_pyproject_toml("tool.semantic_release.major_on_zero", major_on_zero)

    # setup: apply commits to the repo
    # NOTE: each commit gets a strictly increasing timestamp (1s apart) so the
    # resulting history ordering is deterministic for the test run
    stable_now_datetime = stable_now_date()
    commit_timestamp_gen = (
        (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds")
        for i in count(step=1)
    )

    for commit_message in commit_messages or []:
        add_text_to_file(repo, file_in_repo)
        repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen))

    # Setup: take measurement before running the version command
    head_sha_before = repo.head.commit.hexsha
    tags_before = {tag.name for tag in repo.tags}

    # Derive the cli arguments based on parameter input
    prerelease_args = list(
        filter(
            None,
            [
                "--as-prerelease" if prerelease else "",
                *(["--prerelease-token", prerelease_token] if prerelease else []),
            ],
        )
    )

    # Act
    cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args]
    result = run_cli(cli_cmd[1:])

    # take measurement after running the version command
    head_after = repo.head.commit
    tags_after = {tag.name for tag in repo.tags}
    tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before))

    # Evaluate (no release actions should have occurred when no bump)
    assert_successful_exit_code(result, cli_cmd)
    assert f"{next_release_version}\n" == result.stdout

    # No commit has been made
    assert head_sha_before == head_after.hexsha
    assert len(tags_set_difference) == 0  # No tag created
    assert mocked_git_push.call_count == 0  # no git push of tag or commit
    assert post_mocker.call_count == 0  # no vcs release
@pytest.mark.parametrize(
    str.join(
        ", ",
        [
            "repo_result",
            "commit_messages",
            "prerelease",
            "prerelease_token",
            "major_on_zero",
            "allow_zero_version",
            "next_release_version",
            "branch_name",
        ],
    ),
    xdist_sort_hack(
        [
            pytest.param(
                lazy_fixture(repo_fixture_name),
                commit_messages,
                prerelease,
                "rc" if prerelease_token is None else prerelease_token,
                major_on_zero,
                allow_zero_version,
                next_release_version,
                "main" if branch_name is None else branch_name,
                marks=pytest.mark.comprehensive,
            )
            for (repo_fixture_name, prerelease_token), values in {
                # Latest version for repo_with_no_tags is currently 0.0.0 (default)
                # It's biggest change type is minor, so the next version should be 0.1.0
                (
                    repo_w_no_tags_scipy_commits.__name__,
                    None,
                ): [
                    *(
                        # when prerelease is False, & major_on_zero is False &
                        # allow_zero_version is True, the version should be
                        # 0.1.0, with the given commits
                        (commits, False, False, True, "0.1.0", None)
                        for commits in (
                            # Even when this test does not change anything, the base modification
                            # will be a minor change and thus the version will be bumped to 0.1.0
                            None,
                            # Non version bumping commits are absorbed into the previously detected minor bump
                            lazy_fixture(scipy_chore_commits.__name__),
                            # Patch commits are absorbed into the previously detected minor bump
                            lazy_fixture(scipy_patch_commits.__name__),
                            # Minor level commits are absorbed into the previously detected minor bump
                            lazy_fixture(scipy_minor_commits.__name__),
                            # Given the major_on_zero is False and the version is starting at 0.0.0,
                            # the major level commits are limited to only causing a minor level bump
                            lazy_fixture(scipy_major_commits.__name__),
                        )
                    ),
                    # when prerelease is False, & major_on_zero is False, & allow_zero_version is True,
                    # the version should only be minor bumped when provided major commits because
                    # of the major_on_zero value
                    (
                        lazy_fixture(scipy_major_commits.__name__),
                        False,
                        False,
                        True,
                        "0.1.0",
                        None,
                    ),
                    # when prerelease is False, & major_on_zero is True & allow_zero_version is True,
                    # the version should be major bumped when provided major commits because
                    # of the major_on_zero value
                    (
                        lazy_fixture(scipy_major_commits.__name__),
                        False,
                        True,
                        True,
                        "1.0.0",
                        None,
                    ),
                    *(
                        # when prerelease is False, & allow_zero_version is False, the version should be
                        # 1.0.0, across the board because 0 is not a valid major version.
                        # major_on_zero is ignored as it is not relevant but tested for completeness
                        (commits, False, major_on_zero, False, "1.0.0", None)
                        for major_on_zero in (True, False)
                        for commits in (
                            None,
                            lazy_fixture(scipy_chore_commits.__name__),
                            lazy_fixture(scipy_patch_commits.__name__),
                            lazy_fixture(scipy_minor_commits.__name__),
                            lazy_fixture(scipy_major_commits.__name__),
                        )
                    ),
                ],
                # Latest version for repo_with_single_branch is currently 0.1.1
                # Note repo_with_single_branch isn't modelled with prereleases
                (
                    repo_w_trunk_only_scipy_commits.__name__,
                    None,
                ): [
                    *(
                        # when prerelease must be False, and allow_zero_version is True,
                        # the version is patch bumped because of the patch level commits
                        # regardless of the major_on_zero value
                        (
                            lazy_fixture(scipy_patch_commits.__name__),
                            False,
                            major_on_zero,
                            True,
                            "0.1.2",
                            None,
                        )
                        for major_on_zero in (True, False)
                    ),
                    *(
                        # when prerelease must be False, and allow_zero_version is True,
                        # the version is minor bumped because of the major_on_zero value=False
                        (commits, False, False, True, "0.2.0", None)
                        for commits in (
                            lazy_fixture(scipy_minor_commits.__name__),
                            lazy_fixture(scipy_major_commits.__name__),
                        )
                    ),
                    # when prerelease must be False, and allow_zero_version is True,
                    # but the major_on_zero is True, then when a major level commit is given,
                    # the version should be bumped to the next major version
                    (
                        lazy_fixture(scipy_major_commits.__name__),
                        False,
                        True,
                        True,
                        "1.0.0",
                        None,
                    ),
                    *(
                        # when prerelease must be False, & allow_zero_version is False, the version should be
                        # 1.0.0, with any change regardless of major_on_zero
                        (commits, False, major_on_zero, False, "1.0.0", None)
                        for major_on_zero in (True, False)
                        for commits in (
                            None,
                            lazy_fixture(scipy_chore_commits.__name__),
                            lazy_fixture(scipy_patch_commits.__name__),
                            lazy_fixture(scipy_minor_commits.__name__),
                            lazy_fixture(scipy_major_commits.__name__),
                        )
                    ),
                ],
                # Latest version for repo_with_single_branch_and_prereleases is
                # currently 0.2.0
                (
                    repo_w_trunk_only_n_prereleases_scipy_commits.__name__,
                    None,
                ): [
                    # when allow_zero_version is True,
                    # prerelease is False, & major_on_zero is False, the version should be
                    # patch bumped as a prerelease version, when given patch level commits
                    (
                        lazy_fixture(scipy_patch_commits.__name__),
                        True,
                        False,
                        True,
                        "0.2.1-rc.1",
                        None,
                    ),
                    # when allow_zero_version is True,
                    # prerelease is False, & major_on_zero is False, the version should be
                    # patch bumped, when given patch level commits
                    (
                        lazy_fixture(scipy_patch_commits.__name__),
                        False,
                        False,
                        True,
                        "0.2.1",
                        None,
                    ),
                    *(
                        # when allow_zero_version is True,
                        # prerelease is True, & major_on_zero is False, the version should be
                        # minor bumped as a prerelease version, when given commits of a minor or major level
                        (commits, True, False, True, "0.3.0-rc.1", None)
                        for commits in (
                            lazy_fixture(scipy_minor_commits.__name__),
                            lazy_fixture(scipy_major_commits.__name__),
                        )
                    ),
                    *(
                        # when allow_zero_version is True, prerelease is True, & major_on_zero
                        # is False, the version should be minor bumped, when given commits of a
                        # minor or major level because major_on_zero = False
                        (commits, False, False, True, "0.3.0", None)
                        for commits in (
                            lazy_fixture(scipy_minor_commits.__name__),
                            lazy_fixture(scipy_major_commits.__name__),
                        )
                    ),
                    # when prerelease is True, & major_on_zero is True, and allow_zero_version
                    # is True, the version should be bumped to 1.0.0 as a prerelease version, when
                    # given major level commits
                    (
                        lazy_fixture(scipy_major_commits.__name__),
                        True,
                        True,
                        True,
                        "1.0.0-rc.1",
                        None,
                    ),
                    # when prerelease is False, & major_on_zero is True, and allow_zero_version
                    # is True, the version should be bumped to 1.0.0, when given major level commits
                    (
                        lazy_fixture(scipy_major_commits.__name__),
                        False,
                        True,
                        True,
                        "1.0.0",
                        None,
                    ),
                    *(
                        # when prerelease is True, & allow_zero_version is False, the version should be
                        # bumped to 1.0.0 as a prerelease version, when given any/none commits
                        # because 0.x is no longer a valid version regardless of the major_on_zero value
                        (commits, True, major_on_zero, False, "1.0.0-rc.1", None)
                        for major_on_zero in (True, False)
                        for commits in (
                            None,
                            lazy_fixture(scipy_chore_commits.__name__),
                            lazy_fixture(scipy_patch_commits.__name__),
                            lazy_fixture(scipy_minor_commits.__name__),
                            lazy_fixture(scipy_major_commits.__name__),
                        )
                    ),
                    *(
                        # when prerelease is True, & allow_zero_version is False, the version should be
                        # bumped to 1.0.0, when given any/none commits
                        # because 0.x is no longer a valid version regardless of the major_on_zero value
                        (commits, False, major_on_zero, False, "1.0.0", None)
                        for major_on_zero in (True, False)
                        for commits in (
                            lazy_fixture(scipy_patch_commits.__name__),
                            lazy_fixture(scipy_minor_commits.__name__),
                            lazy_fixture(scipy_major_commits.__name__),
                        )
                    ),
                ],
            }.items()
            for (
                commit_messages,
                prerelease,
                major_on_zero,
                allow_zero_version,
                next_release_version,
                branch_name,
            ) in values  # type: ignore[attr-defined]
        ],
    ),
)
def test_version_next_w_zero_dot_versions_scipy(
    repo_result: BuiltRepoResult,
    commit_messages: list[str],
    prerelease: bool,
    prerelease_token: str,
    next_release_version: str,
    branch_name: str,
    major_on_zero: bool,
    allow_zero_version: bool,
    run_cli: RunCliFn,
    file_in_repo: str,
    update_pyproject_toml: UpdatePyprojectTomlFn,
    mocked_git_push: MagicMock,
    post_mocker: Mocker,
    stable_now_date: GetStableDateNowFn,
):
    """
    Given a 0.x repo using the scipy commit parser and bump-worthy commits,
    `semantic-release version` must perform a full release: one release
    commit, one new tag matching the expected next version, two pushes
    (commit + tag), and one VCS release request.
    """
    repo = repo_result["repo"]

    # setup: select the branch we desire for the next bump
    if repo.active_branch.name != branch_name:
        repo.heads[branch_name].checkout()

    # setup: update pyproject.toml with the necessary settings
    update_pyproject_toml(
        "tool.semantic_release.allow_zero_version", allow_zero_version
    )
    update_pyproject_toml("tool.semantic_release.major_on_zero", major_on_zero)

    # setup: apply commits to the repo
    # NOTE: each commit gets a strictly increasing timestamp (1s apart) so the
    # resulting history ordering is deterministic for the test run
    stable_now_datetime = stable_now_date()
    commit_timestamp_gen = (
        (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds")
        for i in count(step=1)
    )

    for commit_message in commit_messages or []:
        add_text_to_file(repo, file_in_repo)
        repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen))

    # Setup: take measurement before running the version command
    head_sha_before = repo.head.commit.hexsha
    tags_before = {tag.name for tag in repo.tags}

    # Derive the cli arguments based on parameter input
    prerelease_args = list(
        filter(
            None,
            [
                "--as-prerelease" if prerelease else "",
                *(["--prerelease-token", prerelease_token] if prerelease else []),
            ],
        )
    )

    # Act
    cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args]
    result = run_cli(cli_cmd[1:])

    # take measurement after running the version command
    head_after = repo.head.commit
    tags_after = {tag.name for tag in repo.tags}
    tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before))

    # Evaluate (normal release actions should have occurred when forced patch bump)
    assert_successful_exit_code(result, cli_cmd)

    # A commit has been made (regardless of precommit)
    assert [head_sha_before] == [head.hexsha for head in head_after.parents]
    assert len(tags_set_difference) == 1  # A tag has been created
    assert f"v{next_release_version}" in tags_set_difference
    assert mocked_git_push.call_count == 2  # 1 for commit, 1 for tag
    assert post_mocker.call_count == 1  # vcs release creation occurred
@pytest.mark.parametrize(
    str.join(
        ", ",
        [
            "repo_result",
            "commit_messages",
            "prerelease",
            "prerelease_token",
            "major_on_zero",
            "allow_zero_version",
            "next_release_version",
            "branch_name",
        ],
    ),
    xdist_sort_hack(
        [
            *[
                pytest.param(
                    lazy_fixture(repo_fixture_name),
                    commit_messages,
                    prerelease,
                    "rc" if prerelease_token is None else prerelease_token,
                    major_on_zero,
                    allow_zero_version,
                    next_release_version,
                    "main" if branch_name is None else branch_name,
                    marks=pytest.mark.comprehensive,
                )
                for (repo_fixture_name, prerelease_token), values in {
                    # Latest version for repo_with_single_branch is currently 0.1.1
                    # Note repo_with_single_branch isn't modelled with prereleases
                    (
                        repo_w_trunk_only_scipy_commits.__name__,
                        None,
                    ): [
                        *(
                            # when prerelease must be False, and allow_zero_version is True,
                            # the version is not bumped because of non valuable changes regardless
                            # of the major_on_zero value
                            (commits, False, major_on_zero, True, "0.1.1", None)
                            for major_on_zero in (True, False)
                            for commits in (
                                None,
                                lazy_fixture(scipy_chore_commits.__name__),
                            )
                        ),
                    ],
                    # Latest version for repo_with_single_branch_and_prereleases is
                    # currently 0.2.0
                    (
                        repo_w_trunk_only_n_prereleases_scipy_commits.__name__,
                        None,
                    ): [
                        *(
                            # when allow_zero_version is True, the version is not bumped
                            # regardless of prerelease and major_on_zero values when given
                            # non valuable changes
                            (commits, prerelease, major_on_zero, True, "0.2.0", None)
                            for prerelease in (True, False)
                            for major_on_zero in (True, False)
                            for commits in (
                                None,
                                lazy_fixture(scipy_chore_commits.__name__),
                            )
                        ),
                    ],
                }.items()
                for (
                    commit_messages,
                    prerelease,
                    major_on_zero,
                    allow_zero_version,
                    next_release_version,
                    branch_name,
                ) in values  # type: ignore[attr-defined]
            ],
        ],
    ),
)
def test_version_next_w_zero_dot_versions_no_bump_scipy(
    repo_result: BuiltRepoResult,
    commit_messages: list[str],
    prerelease: bool,
    prerelease_token: str,
    next_release_version: str,
    branch_name: str,
    major_on_zero: bool,
    allow_zero_version: bool,
    run_cli: RunCliFn,
    file_in_repo: str,
    update_pyproject_toml: UpdatePyprojectTomlFn,
    mocked_git_push: MagicMock,
    post_mocker: Mocker,
    stable_now_date: GetStableDateNowFn,
):
    """
    Given a 0.x repo using the scipy commit parser and only non-bump-worthy
    (chore/no) commits, `semantic-release version` must be a no-op: it prints
    the current version on stdout, makes no commit, creates no tag, pushes
    nothing, and posts no VCS release.
    """
    repo = repo_result["repo"]

    # setup: select the branch we desire for the next bump
    if repo.active_branch.name != branch_name:
        repo.heads[branch_name].checkout()

    # setup: update pyproject.toml with the necessary settings
    update_pyproject_toml(
        "tool.semantic_release.allow_zero_version", allow_zero_version
    )
    update_pyproject_toml("tool.semantic_release.major_on_zero", major_on_zero)

    # setup: apply commits to the repo
    # NOTE: each commit gets a strictly increasing timestamp (1s apart) so the
    # resulting history ordering is deterministic for the test run
    stable_now_datetime = stable_now_date()
    commit_timestamp_gen = (
        (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds")
        for i in count(step=1)
    )

    for commit_message in commit_messages or []:
        add_text_to_file(repo, file_in_repo)
        repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen))

    # Setup: take measurement before running the version command
    head_sha_before = repo.head.commit.hexsha
    tags_before = {tag.name for tag in repo.tags}

    # Derive the cli arguments based on parameter input
    prerelease_args = list(
        filter(
            None,
            [
                "--as-prerelease" if prerelease else "",
                *(["--prerelease-token", prerelease_token] if prerelease else []),
            ],
        )
    )

    # Act
    cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args]
    result = run_cli(cli_cmd[1:])

    # take measurement after running the version command
    head_after = repo.head.commit
    tags_after = {tag.name for tag in repo.tags}
    tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before))

    # Evaluate (no release actions should have occurred when no bump)
    assert_successful_exit_code(result, cli_cmd)
    assert f"{next_release_version}\n" == result.stdout

    # No commit has been made
    assert head_sha_before == head_after.hexsha
    assert len(tags_set_difference) == 0  # No tag created
    assert mocked_git_push.call_count == 0  # no git push of tag or commit
    assert post_mocker.call_count == 0  # no vcs release
update_pyproject_toml( "tool.semantic_release.allow_zero_version", allow_zero_version ) update_pyproject_toml("tool.semantic_release.major_on_zero", major_on_zero) # setup: apply commits to the repo stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) for commit_message in commit_messages or []: add_text_to_file(repo, file_in_repo) repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen)) # Setup: take measurement before running the version command head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Derive the cli arguments based on parameter input prerelease_args = list( filter( None, [ "--as-prerelease" if prerelease else "", *(["--prerelease-token", prerelease_token] if prerelease else []), ], ) ) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args] result = run_cli(cli_cmd[1:]) # take measurement after running the version command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) # Evaluate (no release actions should have occurred when no bump) assert_successful_exit_code(result, cli_cmd) assert f"{next_release_version}\n" == result.stdout # No commit has been made assert head_sha_before == head_after.hexsha assert len(tags_set_difference) == 0 # No tag created assert mocked_git_push.call_count == 0 # no git push of tag or commit assert post_mocker.call_count == 0 # no vcs release @pytest.mark.parametrize( str.join( " ,", [ "repo_result", "commit_parser", "commit_messages", "prerelease", "prerelease_token", "major_on_zero", "allow_zero_version", "next_release_version", "branch_name", ], ), xdist_sort_hack( [ ( # Latest version for repo_w_initial_commit is currently 0.0.0 # with no changes made it should be 0.0.0 lazy_fixture(repo_w_initial_commit.__name__), 
ConventionalCommitParser.__name__.replace("CommitParser", "").lower(), None, False, "rc", False, True, "0.0.0", "main", ), *[ pytest.param( lazy_fixture(repo_w_initial_commit.__name__), str.replace(parser_class_name, "CommitParser", "").lower(), commit_messages, prerelease, prerelease_token, major_on_zero, allow_zero_version, next_release_version, "main", marks=pytest.mark.comprehensive, ) for prerelease_token, values in { # Latest version for repo_with_no_tags is currently 0.0.0 (default) # It's biggest change type is minor, so the next version should be 0.1.0 "rc": [ *( # when prerelease is False, major_on_zero is True & False, & allow_zero_version is True # the version should be 0.0.0, when no distintive changes have been made since the # start of the project (commits, parser, prerelease, major_on_zero, True, "0.0.0") for prerelease in (True, False) for major_on_zero in (True, False) for commits, parser in ( # No commits added, so base is just initial commit at 0.0.0 (None, ConventionalCommitParser.__name__), # Chore like commits also don't trigger a version bump so it stays 0.0.0 ( lazy_fixture(conventional_chore_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(emoji_chore_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(scipy_chore_commits.__name__), ScipyCommitParser.__name__, ), ) ), *( (commits, parser, True, major_on_zero, True, "0.0.1-rc.1") for major_on_zero in (True, False) for commits, parser in ( # when prerelease is True & allow_zero_version is True, the version should be # a patch bump as a prerelease version, because of the patch level commits # major_on_zero is irrelevant here as we are only applying patch commits ( lazy_fixture(conventional_patch_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(emoji_patch_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(scipy_patch_commits.__name__), ScipyCommitParser.__name__, ), ) ), *( (commits, parser, False, major_on_zero, True, 
"0.0.1") for major_on_zero in (True, False) for commits, parser in ( # when prerelease is False, & allow_zero_version is True, the version should be # a patch bump because of the patch commits added # major_on_zero is irrelevant here as we are only applying patch commits ( lazy_fixture(conventional_patch_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(emoji_patch_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(scipy_patch_commits.__name__), ScipyCommitParser.__name__, ), ) ), *( (commits, parser, True, False, True, "0.1.0-rc.1") for commits, parser in ( # when prerelease is False, & major_on_zero is False, the version should be # a minor bump because of the minor commits added ( lazy_fixture(conventional_minor_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(emoji_minor_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(scipy_minor_commits.__name__), ScipyCommitParser.__name__, ), # Given the major_on_zero is False and the version is starting at 0.0.0, # the major level commits are limited to only causing a minor level bump ( lazy_fixture(conventional_major_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(emoji_major_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(scipy_major_commits.__name__), ScipyCommitParser.__name__, ), ) ), *( (commits, parser, False, False, True, "0.1.0") for commits, parser in ( # when prerelease is False, # major_on_zero is False, & allow_zero_version is True # the version should be a minor bump of 0.0.0 # because of the minor commits added and zero version is allowed ( lazy_fixture(conventional_minor_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(emoji_minor_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(scipy_minor_commits.__name__), ScipyCommitParser.__name__, ), # Given the major_on_zero is False and the version is starting at 0.0.0, # the major level commits are limited to 
only causing a minor level bump ( lazy_fixture(conventional_major_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(emoji_major_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(scipy_major_commits.__name__), ScipyCommitParser.__name__, ), ) ), *( # when prerelease is True, & allow_zero_version is False, the version should be # a prerelease version 1.0.0-rc.1, across the board when any valuable change # is made because of the allow_zero_version is False, major_on_zero is ignored # when allow_zero_version is False (but we still test it) (commits, parser, True, major_on_zero, False, "1.0.0-rc.1") for major_on_zero in (True, False) for commits, parser in ( # parser doesn't matter here as long as it detects a NO_RELEASE on Initial Commit (None, ConventionalCommitParser.__name__), ( lazy_fixture(conventional_chore_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(conventional_patch_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(conventional_minor_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(conventional_major_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(emoji_chore_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(emoji_patch_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(emoji_minor_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(emoji_major_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(scipy_chore_commits.__name__), ScipyCommitParser.__name__, ), ( lazy_fixture(scipy_patch_commits.__name__), ScipyCommitParser.__name__, ), ( lazy_fixture(scipy_minor_commits.__name__), ScipyCommitParser.__name__, ), ( lazy_fixture(scipy_major_commits.__name__), ScipyCommitParser.__name__, ), ) ), *( # when prerelease is True, & allow_zero_version is False, the version should be # 1.0.0, across the board when any valuable change # is made because of the allow_zero_version is False. 
major_on_zero is ignored # when allow_zero_version is False (but we still test it) (commits, parser, False, major_on_zero, False, "1.0.0") for major_on_zero in (True, False) for commits, parser in ( (None, ConventionalCommitParser.__name__), ( lazy_fixture(conventional_chore_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(conventional_patch_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(conventional_minor_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(conventional_major_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(emoji_chore_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(emoji_patch_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(emoji_minor_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(emoji_major_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(scipy_chore_commits.__name__), ScipyCommitParser.__name__, ), ( lazy_fixture(scipy_patch_commits.__name__), ScipyCommitParser.__name__, ), ( lazy_fixture(scipy_minor_commits.__name__), ScipyCommitParser.__name__, ), ( lazy_fixture(scipy_major_commits.__name__), ScipyCommitParser.__name__, ), ) ), ], }.items() for ( commit_messages, parser_class_name, prerelease, major_on_zero, allow_zero_version, next_release_version, ) in values # type: ignore[attr-defined] ], ], ), ) def test_version_next_w_zero_dot_versions_minimums( repo_result: BuiltRepoResult, commit_parser: str, commit_messages: list[str], prerelease: bool, prerelease_token: str, next_release_version: str, branch_name: str, major_on_zero: bool, allow_zero_version: bool, run_cli: RunCliFn, file_in_repo: str, update_pyproject_toml: UpdatePyprojectTomlFn, mocked_git_push: MagicMock, post_mocker: Mocker, stable_now_date: GetStableDateNowFn, ): repo = repo_result["repo"] # setup: select the branch we desire for the next bump if repo.active_branch.name != branch_name: repo.heads[branch_name].checkout() 
# setup: update pyproject.toml with the necessary settings update_pyproject_toml("tool.semantic_release.commit_parser", commit_parser) update_pyproject_toml( "tool.semantic_release.allow_zero_version", allow_zero_version ) update_pyproject_toml("tool.semantic_release.major_on_zero", major_on_zero) # setup: apply commits to the repo stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) for commit_message in commit_messages or []: add_text_to_file(repo, file_in_repo) repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen)) # Setup: take measurement before running the version command head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Derive the cli arguments based on parameter input prerelease_args = list( filter( None, [ "--as-prerelease" if prerelease else "", *(["--prerelease-token", prerelease_token] if prerelease else []), ], ) ) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args] result = run_cli(cli_cmd[1:]) # take measurement after running the version command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) # Evaluate (normal release actions should have occurred when forced patch bump) assert_successful_exit_code(result, cli_cmd) # A commit has been made (regardless of precommit) assert [head_sha_before] == [head.hexsha for head in head_after.parents] assert len(tags_set_difference) == 1 # A tag has been created assert f"v{next_release_version}" in tags_set_difference assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred python-semantic-release-10.4.1/tests/e2e/cmd_version/test_version_changelog.py000066400000000000000000000771731506116242600276110ustar00rootroot00000000000000from __future__ import 
annotations import os from datetime import datetime, timezone from typing import TYPE_CHECKING import pytest from freezegun import freeze_time from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.changelog.context import ChangelogMode from semantic_release.cli.config import ChangelogOutputFormat from tests.const import MAIN_PROG_NAME, VERSION_SUBCMD from tests.fixtures.example_project import ( default_md_changelog_insertion_flag, default_rst_changelog_insertion_flag, example_changelog_md, example_changelog_rst, ) from tests.fixtures.repos import ( repo_w_git_flow_w_alpha_prereleases_n_conventional_commits, repo_w_git_flow_w_alpha_prereleases_n_emoji_commits, repo_w_git_flow_w_alpha_prereleases_n_scipy_commits, repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits, repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format, repo_w_git_flow_w_rc_n_alpha_prereleases_n_emoji_commits, repo_w_git_flow_w_rc_n_alpha_prereleases_n_scipy_commits, repo_w_github_flow_w_default_release_channel_conventional_commits, repo_w_github_flow_w_default_release_channel_emoji_commits, repo_w_github_flow_w_default_release_channel_scipy_commits, repo_w_github_flow_w_feature_release_channel_conventional_commits, repo_w_github_flow_w_feature_release_channel_emoji_commits, repo_w_github_flow_w_feature_release_channel_scipy_commits, repo_w_no_tags_conventional_commits, repo_w_no_tags_conventional_commits_unmasked_initial_release, repo_w_no_tags_emoji_commits, repo_w_no_tags_scipy_commits, repo_w_trunk_only_conventional_commits, repo_w_trunk_only_emoji_commits, repo_w_trunk_only_n_prereleases_conventional_commits, repo_w_trunk_only_n_prereleases_emoji_commits, repo_w_trunk_only_n_prereleases_scipy_commits, repo_w_trunk_only_scipy_commits, ) from tests.util import assert_successful_exit_code if TYPE_CHECKING: from pathlib import Path from tests.conftest import ( FormatDateStrFn, GetCachedRepoDataFn, GetStableDateNowFn, RunCliFn, ) 
from tests.fixtures.example_project import UpdatePyprojectTomlFn from tests.fixtures.git_repo import ( BuiltRepoResult, CommitConvention, GetCommitsFromRepoBuildDefFn, GetVersionsFromRepoBuildDefFn, ) @pytest.mark.parametrize( "changelog_file, insertion_flag", [ ( # ChangelogOutputFormat.MARKDOWN lazy_fixture(example_changelog_md.__name__), lazy_fixture(default_md_changelog_insertion_flag.__name__), ), ( # ChangelogOutputFormat.RESTRUCTURED_TEXT lazy_fixture(example_changelog_rst.__name__), lazy_fixture(default_rst_changelog_insertion_flag.__name__), ), ], ) @pytest.mark.parametrize( "repo_result, repo_fixture_name, tag_format", [ ( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), repo_w_trunk_only_conventional_commits.__name__, "v{version}", ), *[ pytest.param( lazy_fixture(repo_fixture), repo_fixture, "v{version}" if tag_format is None else tag_format, marks=pytest.mark.comprehensive, ) for repo_fixture, tag_format in [ # Must have a previous release/tag *[ ( repo_fixture_name, None, ) for repo_fixture_name in [ # repo_with_single_branch_conventional_commits.__name__, # default repo_w_trunk_only_emoji_commits.__name__, repo_w_trunk_only_scipy_commits.__name__, ] ], *[ ( repo_fixture_name, None, ) for repo_fixture_name in [ repo_w_trunk_only_n_prereleases_conventional_commits.__name__, repo_w_trunk_only_n_prereleases_emoji_commits.__name__, repo_w_trunk_only_n_prereleases_scipy_commits.__name__, ] ], *[ ( repo_fixture_name, None, ) for repo_fixture_name in [ repo_w_github_flow_w_default_release_channel_conventional_commits.__name__, repo_w_github_flow_w_default_release_channel_emoji_commits.__name__, repo_w_github_flow_w_default_release_channel_scipy_commits.__name__, ] ], *[ ( repo_fixture_name, None, ) for repo_fixture_name in [ repo_w_github_flow_w_feature_release_channel_conventional_commits.__name__, repo_w_github_flow_w_feature_release_channel_emoji_commits.__name__, repo_w_github_flow_w_feature_release_channel_scipy_commits.__name__, ] ], *[ ( 
repo_fixture_name, None, ) for repo_fixture_name in [ repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__, repo_w_git_flow_w_alpha_prereleases_n_emoji_commits.__name__, repo_w_git_flow_w_alpha_prereleases_n_scipy_commits.__name__, ] ], *[ ( repo_fixture_name, None, ) for repo_fixture_name in [ repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits.__name__, repo_w_git_flow_w_rc_n_alpha_prereleases_n_emoji_commits.__name__, repo_w_git_flow_w_rc_n_alpha_prereleases_n_scipy_commits.__name__, ] ], *[ ( repo_fixture_name, "submod-v{version}", ) for repo_fixture_name in [ repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format.__name__, ] ], ] ], ], ) def test_version_updates_changelog_w_new_version( repo_result: BuiltRepoResult, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, tag_format: str, update_pyproject_toml: UpdatePyprojectTomlFn, run_cli: RunCliFn, changelog_file: Path, insertion_flag: str, repo_fixture_name: str, stable_now_date: GetStableDateNowFn, get_cached_repo_data: GetCachedRepoDataFn, ): """ Given a previously released custom modified changelog file, When the version command is run with changelog.mode set to "update", Then the version is created and the changelog file is updated with new release info while maintaining the previously customized content """ repo = repo_result["repo"] latest_tag = tag_format.format( version=get_versions_from_repo_build_def(repo_result["definition"])[-1] ) if not (repo_build_data := get_cached_repo_data(repo_fixture_name)): pytest.fail("Repo build date not found in cache") repo_build_datetime = datetime.strptime(repo_build_data["build_date"], "%Y-%m-%d") now_datetime = stable_now_date().replace( year=repo_build_datetime.year, month=repo_build_datetime.month, day=repo_build_datetime.day, ) # Custom text to maintain (must be different from the default) custom_text = "---{ls}{ls}Custom footer text{ls}".format(ls=os.linesep) # Capture expected changelog content 
with changelog_file.open(newline=os.linesep) as rfd: initial_changelog_parts = rfd.read().split(insertion_flag) expected_changelog_content = str.join( insertion_flag, [ initial_changelog_parts[0], str.join( os.linesep, [ initial_changelog_parts[1], "", custom_text, ], ), ], ) # Reverse last release repo.git.tag("-d", latest_tag) repo.git.reset("--hard", "HEAD~1") # Set the project configurations update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) # Modify the current changelog with our custom text at bottom # Universal newlines is ok here since we are writing it back out # and not working with the os-specific insertion flag changelog_file.write_text( str.join( "\n", [ changelog_file.read_text(), "", custom_text, ], ) ) with freeze_time(now_datetime.astimezone(timezone.utc)): cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--no-push", "--changelog"] result = run_cli(cli_cmd[1:]) # Capture the new changelog content (os aware because of expected content) with changelog_file.open(newline=os.linesep) as rfd: actual_content = rfd.read() # Evaluate assert_successful_exit_code(result, cli_cmd) assert expected_changelog_content == actual_content @pytest.mark.parametrize( "changelog_format, changelog_file, insertion_flag", [ ( ChangelogOutputFormat.MARKDOWN, lazy_fixture(example_changelog_md.__name__), lazy_fixture(default_md_changelog_insertion_flag.__name__), ), ( ChangelogOutputFormat.RESTRUCTURED_TEXT, lazy_fixture(example_changelog_rst.__name__), lazy_fixture(default_rst_changelog_insertion_flag.__name__), ), ], ) @pytest.mark.parametrize( "repo_result, repo_fixture_name", [ ( lazy_fixture(repo_w_no_tags_conventional_commits.__name__), repo_w_no_tags_conventional_commits.__name__, ), *[ pytest.param( lazy_fixture(repo_fixture), repo_fixture, marks=pytest.mark.comprehensive, ) for repo_fixture in [ # Must not have a 
single release/tag # repo_with_no_tags_conventional_commits.__name__, # default repo_w_no_tags_emoji_commits.__name__, repo_w_no_tags_scipy_commits.__name__, ] ], ], ) def test_version_updates_changelog_wo_prev_releases( repo_result: BuiltRepoResult, repo_fixture_name: str, run_cli: RunCliFn, update_pyproject_toml: UpdatePyprojectTomlFn, changelog_format: ChangelogOutputFormat, changelog_file: Path, insertion_flag: str, stable_now_date: GetStableDateNowFn, format_date_str: FormatDateStrFn, get_cached_repo_data: GetCachedRepoDataFn, ): """ Given the repository has no releases and the user has provided a initialized changelog, When the version command is run with changelog.mode set to "update", Then the version is created and the changelog file is updated with only an initial release statement """ if not (repo_build_data := get_cached_repo_data(repo_fixture_name)): pytest.fail("Repo build date not found in cache") repo_build_datetime = datetime.strptime(repo_build_data["build_date"], "%Y-%m-%d") now_datetime = stable_now_date().replace( year=repo_build_datetime.year, month=repo_build_datetime.month, day=repo_build_datetime.day, ) repo_build_date_str = format_date_str(now_datetime) # Custom text to maintain (must be different from the default) custom_text = "---{ls}{ls}Custom footer text{ls}".format(ls=os.linesep) # Set the project configurations update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) version = "v1.0.0" rst_version_header = f"{version} ({repo_build_date_str})" txt_after_insertion_flag = { ChangelogOutputFormat.MARKDOWN: str.join( os.linesep, [ f"## {version} ({repo_build_date_str})", "", "- Initial Release", ], ), ChangelogOutputFormat.RESTRUCTURED_TEXT: str.join( os.linesep, [ f".. 
_changelog-{version}:", "", rst_version_header, f"{'=' * len(rst_version_header)}", "", "* Initial Release", ], ), } # Capture and modify the current changelog content to become the expected output # We much use os.linesep here since the insertion flag is os-specific with changelog_file.open(newline=os.linesep) as rfd: initial_changelog_parts = rfd.read().split(insertion_flag) # content is os-specific because of the insertion flag & how we read the original file expected_changelog_content = str.join( insertion_flag, [ initial_changelog_parts[0], str.join( os.linesep, [ os.linesep, txt_after_insertion_flag[changelog_format], "", custom_text, ], ), ], ) # Grab the Unreleased changelog & create the initialized user changelog # force output to not perform any newline translations with changelog_file.open(mode="w", newline="") as wfd: wfd.write( str.join( insertion_flag, [initial_changelog_parts[0], f"{os.linesep * 2}{custom_text}"], ) ) wfd.flush() # Act with freeze_time(now_datetime.astimezone(timezone.utc)): cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--no-push", "--changelog"] result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Ensure changelog exists assert changelog_file.exists() # Capture the new changelog content (os aware because of expected content) with changelog_file.open(newline=os.linesep) as rfd: actual_content = rfd.read() # Check that the changelog footer is maintained and updated with Unreleased info assert expected_changelog_content == actual_content @pytest.mark.parametrize( "changelog_format, changelog_file, insertion_flag", [ ( ChangelogOutputFormat.MARKDOWN, lazy_fixture(example_changelog_md.__name__), lazy_fixture(default_md_changelog_insertion_flag.__name__), ), ( ChangelogOutputFormat.RESTRUCTURED_TEXT, lazy_fixture(example_changelog_rst.__name__), lazy_fixture(default_rst_changelog_insertion_flag.__name__), ), ], ) @pytest.mark.parametrize( "repo_result, repo_fixture_name", [ pytest.param( 
lazy_fixture(repo_fixture), repo_fixture, marks=pytest.mark.comprehensive, ) for repo_fixture in [ # Must not have a single release/tag repo_w_no_tags_conventional_commits_unmasked_initial_release.__name__, ] ], ) def test_version_updates_changelog_wo_prev_releases_n_unmasked_initial_release( repo_result: BuiltRepoResult, repo_fixture_name: str, run_cli: RunCliFn, update_pyproject_toml: UpdatePyprojectTomlFn, changelog_format: ChangelogOutputFormat, changelog_file: Path, insertion_flag: str, stable_now_date: GetStableDateNowFn, format_date_str: FormatDateStrFn, get_cached_repo_data: GetCachedRepoDataFn, ): """ Given the repository has no releases and the user has provided a initialized changelog, When the version command is run with changelog.mode set to "update", Then the version is created and the changelog file is updated with new release info """ if not (repo_build_data := get_cached_repo_data(repo_fixture_name)): pytest.fail("Repo build date not found in cache") repo_build_datetime = datetime.strptime(repo_build_data["build_date"], "%Y-%m-%d") now_datetime = stable_now_date().replace( year=repo_build_datetime.year, month=repo_build_datetime.month, day=repo_build_datetime.day, ) repo_build_date_str = format_date_str(now_datetime) # Custom text to maintain (must be different from the default) custom_text = "---{ls}{ls}Custom footer text{ls}".format(ls=os.linesep) # Set the project configurations update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) version = "v1.0.0" rst_version_header = f"{version} ({repo_build_date_str})" search_n_replacements = { ChangelogOutputFormat.MARKDOWN: ( "## Unreleased", f"## {version} ({repo_build_date_str})", ), ChangelogOutputFormat.RESTRUCTURED_TEXT: ( ".. 
_changelog-unreleased:{ls}{ls}Unreleased{ls}{underline}".format( ls=os.linesep, underline="=" * len("Unreleased"), ), str.join( os.linesep, [ f".. _changelog-{version}:", "", rst_version_header, f"{'=' * len(rst_version_header)}", ], ), ), } search_text = search_n_replacements[changelog_format][0] replacement_text = search_n_replacements[changelog_format][1] # Capture and modify the current changelog content to become the expected output # We much use os.linesep here since the insertion flag is os-specific with changelog_file.open(newline=os.linesep) as rfd: initial_changelog_parts = rfd.read().split(insertion_flag) # content is os-specific because of the insertion flag & how we read the original file expected_changelog_content = str.join( insertion_flag, [ initial_changelog_parts[0], str.join( os.linesep, [ initial_changelog_parts[1].replace( search_text, replacement_text, ), "", custom_text, ], ), ], ) # Grab the Unreleased changelog & create the initialized user changelog # force output to not perform any newline translations with changelog_file.open(mode="w", newline="") as wfd: wfd.write( str.join( insertion_flag, [initial_changelog_parts[0], f"{os.linesep * 2}{custom_text}"], ) ) wfd.flush() # Act with freeze_time(now_datetime.astimezone(timezone.utc)): cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--no-push", "--changelog"] result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Ensure changelog exists assert changelog_file.exists() # Capture the new changelog content (os aware because of expected content) with changelog_file.open(newline=os.linesep) as rfd: actual_content = rfd.read() # Check that the changelog footer is maintained and updated with Unreleased info assert expected_changelog_content == actual_content @pytest.mark.parametrize( "changelog_file", [ lazy_fixture(example_changelog_md.__name__), lazy_fixture(example_changelog_rst.__name__), ], ) @pytest.mark.parametrize( "repo_result, repo_fixture_name, tag_format", [ 
( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), repo_w_trunk_only_conventional_commits.__name__, "v{version}", ), *[ pytest.param( lazy_fixture(repo_fixture), repo_fixture, "v{version}" if tag_format is None else tag_format, marks=pytest.mark.comprehensive, ) for repo_fixture, tag_format in [ # Must have a previous release/tag *[ ( repo_fixture_name, None, ) for repo_fixture_name in [ # repo_with_single_branch_conventional_commits.__name__, # default repo_w_trunk_only_emoji_commits.__name__, repo_w_trunk_only_scipy_commits.__name__, ] ], *[ ( repo_fixture_name, None, ) for repo_fixture_name in [ repo_w_trunk_only_n_prereleases_conventional_commits.__name__, repo_w_trunk_only_n_prereleases_emoji_commits.__name__, repo_w_trunk_only_n_prereleases_scipy_commits.__name__, ] ], *[ ( repo_fixture_name, None, ) for repo_fixture_name in [ repo_w_github_flow_w_default_release_channel_conventional_commits.__name__, repo_w_github_flow_w_default_release_channel_emoji_commits.__name__, repo_w_github_flow_w_default_release_channel_scipy_commits.__name__, ] ], *[ ( repo_fixture_name, None, ) for repo_fixture_name in [ repo_w_github_flow_w_feature_release_channel_conventional_commits.__name__, repo_w_github_flow_w_feature_release_channel_emoji_commits.__name__, repo_w_github_flow_w_feature_release_channel_scipy_commits.__name__, ] ], *[ ( repo_fixture_name, None, ) for repo_fixture_name in [ repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__, repo_w_git_flow_w_alpha_prereleases_n_emoji_commits.__name__, repo_w_git_flow_w_alpha_prereleases_n_scipy_commits.__name__, ] ], *[ ( repo_fixture_name, None, ) for repo_fixture_name in [ repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits.__name__, repo_w_git_flow_w_rc_n_alpha_prereleases_n_emoji_commits.__name__, repo_w_git_flow_w_rc_n_alpha_prereleases_n_scipy_commits.__name__, ] ], *[ ( repo_fixture_name, "submod-v{version}", ) for repo_fixture_name in [ 
repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format.__name__, ] ], ] ], ], ) def test_version_initializes_changelog_in_update_mode_w_no_prev_changelog( repo_result: BuiltRepoResult, repo_fixture_name: str, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, tag_format: str, run_cli: RunCliFn, update_pyproject_toml: UpdatePyprojectTomlFn, changelog_file: Path, stable_now_date: GetStableDateNowFn, get_cached_repo_data: GetCachedRepoDataFn, ): """ Given that the changelog file does not exist, When the version command is run with changelog.mode set to "update", Then the version is created and the changelog file is initialized with the default content. """ repo = repo_result["repo"] latest_tag = tag_format.format( version=get_versions_from_repo_build_def(repo_result["definition"])[-1] ) if not (repo_build_data := get_cached_repo_data(repo_fixture_name)): pytest.fail("Repo build date not found in cache") repo_build_datetime = datetime.strptime(repo_build_data["build_date"], "%Y-%m-%d") now_datetime = stable_now_date().replace( year=repo_build_datetime.year, month=repo_build_datetime.month, day=repo_build_datetime.day, ) # Capture the expected changelog content expected_changelog_content = changelog_file.read_text() # Reverse last release repo.git.tag("-d", latest_tag) repo.git.reset("--hard", "HEAD~1") # Set the project configurations update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) # Remove any previous changelog to update os.remove(str(changelog_file.resolve())) # Act with freeze_time(now_datetime.astimezone(timezone.utc)): cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--no-push", "--changelog"] result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Check that the changelog file was re-created assert changelog_file.exists() actual_content 
= changelog_file.read_text() # Check that the changelog content is the same as before assert expected_changelog_content == actual_content @pytest.mark.parametrize( "changelog_file, insertion_flag", [ ( lazy_fixture(example_changelog_md.__name__), lazy_fixture(default_md_changelog_insertion_flag.__name__), ), ( lazy_fixture(example_changelog_rst.__name__), lazy_fixture(default_rst_changelog_insertion_flag.__name__), ), ], ) @pytest.mark.usefixtures(repo_w_trunk_only_conventional_commits.__name__) def test_version_maintains_changelog_in_update_mode_w_no_flag( changelog_file: Path, run_cli: RunCliFn, update_pyproject_toml: UpdatePyprojectTomlFn, insertion_flag: str, ): """ Given that the changelog file exists but does not contain the insertion flag, When the version command is run with changelog.mode set to "update", Then the version is created but the changelog file is not updated. """ update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) # Remove the insertion flag from the existing changelog with changelog_file.open(newline=os.linesep) as rfd: expected_changelog_content = rfd.read().replace( f"{insertion_flag}{os.linesep}", "", 1, ) # no newline translations with changelog_file.open("w", newline="") as wfd: wfd.write(expected_changelog_content) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--no-push", "--changelog"] result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Ensure changelog exists assert changelog_file.exists() # Capture the new changelog content (os aware because of expected content) with changelog_file.open(newline=os.linesep) as rfd: actual_content = rfd.read() # Check that the changelog content is the same as before assert expected_changelog_content == actual_content @pytest.mark.parametrize( "changelog_file", [ lazy_fixture(example_changelog_md.__name__), 
lazy_fixture(example_changelog_rst.__name__), ], ) @pytest.mark.parametrize( "repo_result, repo_fixture_name, commit_type, tag_format", [ ( lazy_fixture(repo_fixture), repo_fixture, repo_fixture.split("_")[-2], "v{version}", ) for repo_fixture in [ # Must have a previous release/tag repo_w_trunk_only_conventional_commits.__name__, ] ], ) def test_version_updates_changelog_w_new_version_n_filtered_commit( repo_result: BuiltRepoResult, repo_fixture_name: str, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, commit_type: CommitConvention, tag_format: str, update_pyproject_toml: UpdatePyprojectTomlFn, run_cli: RunCliFn, changelog_file: Path, stable_now_date: GetStableDateNowFn, get_commits_from_repo_build_def: GetCommitsFromRepoBuildDefFn, get_cached_repo_data: GetCachedRepoDataFn, ): """ Given a project that has a version bumping change but also an exclusion pattern for the same change type, When the version command is run, Then the version is created and the changelog file is updated with the excluded commit info anyway. 
""" repo = repo_result["repo"] latest_version = get_versions_from_repo_build_def(repo_result["definition"])[-1] latest_tag = tag_format.format(version=latest_version) repo_definition = get_commits_from_repo_build_def(repo_result["definition"]) if not (repo_build_data := get_cached_repo_data(repo_fixture_name)): pytest.fail("Repo build date not found in cache") repo_build_datetime = datetime.strptime(repo_build_data["build_date"], "%Y-%m-%d") now_datetime = stable_now_date().replace( year=repo_build_datetime.year, month=repo_build_datetime.month, day=repo_build_datetime.day, ) # expected version bump commit (that should be in changelog) bumping_commit = repo_definition[str(latest_version)]["commits"][-1] expected_bump_message = bumping_commit["desc"].capitalize() # Capture the expected changelog content expected_changelog_content = changelog_file.read_text() # Reverse last release repo.git.tag("-d", latest_tag) repo.git.reset("--hard", "HEAD~1") # Set the project configurations update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) update_pyproject_toml( "tool.semantic_release.changelog.exclude_commit_patterns", [f"{bumping_commit['msg'].split(':', maxsplit=1)[0]}: .*"], ) # Act with freeze_time(now_datetime.astimezone(timezone.utc)): cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--no-push", "--changelog"] result = run_cli(cli_cmd[1:]) # Capture the new changelog content (os aware because of expected content) actual_content = changelog_file.read_text() # Evaluate assert_successful_exit_code(result, cli_cmd) assert expected_changelog_content == actual_content for msg_part in expected_bump_message.split("\n\n"): assert msg_part.capitalize() in actual_content 
python-semantic-release-10.4.1/tests/e2e/cmd_version/test_version_changelog_custom_commit_msg.py000066400000000000000000000161451506116242600334110ustar00rootroot00000000000000from __future__ import annotations from datetime import datetime, timedelta, timezone from os import remove as delete_file from textwrap import dedent from typing import TYPE_CHECKING import pytest from freezegun import freeze_time from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.changelog.context import ChangelogMode from tests.const import ( MAIN_PROG_NAME, VERSION_SUBCMD, ) from tests.e2e.conftest import ( get_sanitized_md_changelog_content, get_sanitized_rst_changelog_content, ) from tests.fixtures.example_project import ( changelog_md_file, changelog_rst_file, ) from tests.fixtures.repos import ( repo_w_trunk_only_conventional_commits, ) from tests.util import ( assert_successful_exit_code, ) if TYPE_CHECKING: from pathlib import Path from typing import TypedDict from tests.conftest import GetCachedRepoDataFn, GetStableDateNowFn, RunCliFn from tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project import UpdatePyprojectTomlFn from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuiltRepoResult, CommitDef, GetCfgValueFromDefFn, GetVersionsFromRepoBuildDefFn, SplitRepoActionsByReleaseTagsFn, ) class Commit2Section(TypedDict): conventional: Commit2SectionCommit emoji: Commit2SectionCommit scipy: Commit2SectionCommit class Commit2SectionCommit(TypedDict): commit: CommitDef section: str @pytest.mark.parametrize( str.join( ", ", [ "custom_commit_message", "changelog_mode", "changelog_file", "get_sanitized_changelog_content", "repo_result", "repo_fixture_name", ], ), [ pytest.param( custom_commit_message, changelog_mode, lazy_fixture(changelog_file), lazy_fixture(cl_sanitizer), lazy_fixture(repo_fixture_name), repo_fixture_name, marks=pytest.mark.comprehensive, ) for changelog_mode in [ChangelogMode.INIT, 
ChangelogMode.UPDATE] for changelog_file, cl_sanitizer in [ ( changelog_md_file.__name__, get_sanitized_md_changelog_content.__name__, ), ( changelog_rst_file.__name__, get_sanitized_rst_changelog_content.__name__, ), ] for repo_fixture_name, custom_commit_message in [ *[ ( # Repos: Must have at least 2 releases repo_w_trunk_only_conventional_commits.__name__, commit_msg, ) for commit_msg in [ dedent( # Conventional compliant prefix with skip-ci indicator """\ chore(release): v{version} [skip ci] Automatically generated by python-semantic-release. """ ), ] ], ] ], ) def test_version_changelog_content_custom_commit_message_excluded_automatically( repo_result: BuiltRepoResult, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, get_cfg_value_from_def: GetCfgValueFromDefFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, build_repo_from_definition: BuildRepoFromDefinitionFn, run_cli: RunCliFn, update_pyproject_toml: UpdatePyprojectTomlFn, changelog_file: Path, changelog_mode: ChangelogMode, custom_commit_message: str, repo_fixture_name: str, stable_now_date: GetStableDateNowFn, example_project_dir: Path, get_sanitized_changelog_content: GetSanitizedChangelogContentFn, get_cached_repo_data: GetCachedRepoDataFn, ): """ Given a repo with a custom release commit message When the version subcommand is invoked with the changelog flag Then the resulting changelog content should not include the custom commit message It should work regardless of changelog mode and changelog file type """ expected_changelog_content = get_sanitized_changelog_content( repo_dir=example_project_dir, remove_insertion_flag=bool(changelog_mode == ChangelogMode.INIT), ) repo = repo_result["repo"] repo_def = repo_result["definition"] tag_format_str: str = get_cfg_value_from_def(repo_def, "tag_format_str") # type: ignore[assignment] all_versions = get_versions_from_repo_build_def(repo_def) latest_version = all_versions[-1] previous_tag = 
tag_format_str.format(version=all_versions[-2]) # split repo actions by release actions releasetags_2_steps = split_repo_actions_by_release_tags(repo_def) # Reverse release to make the previous version again with the new commit message repo.git.tag("-d", latest_version.as_tag()) repo.git.reset("--hard", f"{previous_tag}~1") repo.git.tag("-d", previous_tag) # Set the project configurations update_pyproject_toml("tool.semantic_release.changelog.mode", changelog_mode.value) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) update_pyproject_toml( "tool.semantic_release.commit_message", custom_commit_message, ) if not (repo_build_data := get_cached_repo_data(repo_fixture_name)): pytest.fail("Repo build date not found in cache") repo_build_datetime = datetime.strptime(repo_build_data["build_date"], "%Y-%m-%d") now_datetime = stable_now_date().replace( year=repo_build_datetime.year, month=repo_build_datetime.month, day=repo_build_datetime.day, ) if changelog_mode == ChangelogMode.UPDATE and len(all_versions) == 2: # When in update mode, and at the very first release, its better the # changelog file does not exist as we have an non-conformative example changelog # in the base example project delete_file(example_project_dir / changelog_file) cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--no-push", "--changelog"] # Act: make the first release again with freeze_time(now_datetime.astimezone(timezone.utc)): result = run_cli(cli_cmd[1:]) assert_successful_exit_code(result, cli_cmd) # Act: apply commits for change of version # stop before the release step steps_for_next_release = releasetags_2_steps[latest_version][:-1] build_repo_from_definition( dest_dir=example_project_dir, repo_construction_steps=steps_for_next_release, ) # Act: make the second release again with freeze_time(now_datetime.astimezone(timezone.utc) + timedelta(minutes=1)): result = run_cli(cli_cmd[1:]) actual_content = 
get_sanitized_changelog_content( repo_dir=example_project_dir, remove_insertion_flag=bool(changelog_mode == ChangelogMode.INIT), ) # Evaluate assert_successful_exit_code(result, cli_cmd) assert expected_changelog_content == actual_content python-semantic-release-10.4.1/tests/e2e/cmd_version/test_version_github_actions.py000066400000000000000000000113101506116242600306410ustar00rootroot00000000000000from __future__ import annotations import os from datetime import timezone from typing import TYPE_CHECKING, cast import pytest from freezegun import freeze_time from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from tests.const import EXAMPLE_PROJECT_LICENSE, MAIN_PROG_NAME, VERSION_SUBCMD from tests.fixtures.repos import ( repo_w_git_flow_w_alpha_prereleases_n_conventional_commits, ) from tests.util import actions_output_to_dict, assert_successful_exit_code if TYPE_CHECKING: from semantic_release.hvcs.github import Github from tests.conftest import GetStableDateNowFn, RunCliFn from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuiltRepoResult, GenerateDefaultReleaseNotesFromDefFn, GetCfgValueFromDefFn, GetHvcsClientFromRepoDefFn, GetVersionsFromRepoBuildDefFn, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__)], ) def test_version_writes_github_actions_output( repo_result: BuiltRepoResult, run_cli: RunCliFn, example_project_dir: ExProjectDir, get_cfg_value_from_def: GetCfgValueFromDefFn, get_hvcs_client_from_repo_def: GetHvcsClientFromRepoDefFn, generate_default_release_notes_from_def: GenerateDefaultReleaseNotesFromDefFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, stable_now_date: GetStableDateNowFn, ): mock_output_file = example_project_dir / "action.out" repo_def = repo_result["definition"] tag_format_str = 
cast(str, get_cfg_value_from_def(repo_def, "tag_format_str")) all_versions = get_versions_from_repo_build_def(repo_def) latest_release_version = all_versions[-1] release_tag = tag_format_str.format(version=latest_release_version) previous_version = all_versions[-2] if len(all_versions) > 1 else None hvcs_client = cast("Github", get_hvcs_client_from_repo_def(repo_def)) repo_actions_per_version = split_repo_actions_by_release_tags( repo_definition=repo_def ) expected_gha_output = { "released": str(True).lower(), "version": latest_release_version, "tag": release_tag, "link": hvcs_client.create_release_url(release_tag), "commit_sha": "0" * 40, "is_prerelease": str(latest_release_version.is_prerelease).lower(), "previous_version": str(previous_version) if previous_version else "", "release_notes": generate_default_release_notes_from_def( version_actions=repo_actions_per_version[latest_release_version], hvcs=hvcs_client, previous_version=previous_version, license_name=EXAMPLE_PROJECT_LICENSE, mask_initial_release=get_cfg_value_from_def( repo_def, "mask_initial_release" ), ), } # Remove the previous tag & version commit repo_result["repo"].git.tag(release_tag, delete=True) repo_result["repo"].git.reset("HEAD~1", hard=True) # Act with freeze_time(stable_now_date().astimezone(timezone.utc)): cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--no-push"] result = run_cli( cli_cmd[1:], env={"GITHUB_OUTPUT": str(mock_output_file.resolve())} ) assert_successful_exit_code(result, cli_cmd) # Update the expected output with the commit SHA expected_gha_output["commit_sha"] = repo_result["repo"].head.commit.hexsha if not mock_output_file.exists(): pytest.fail( f"Expected output file {mock_output_file} to be created, but it does not exist." 
) # Extract the output with open(mock_output_file, encoding="utf-8", newline=os.linesep) as rfd: action_outputs = actions_output_to_dict(rfd.read()) # Evaluate expected_keys = set(expected_gha_output.keys()) actual_keys = set(action_outputs.keys()) key_difference = expected_keys.symmetric_difference(actual_keys) assert not key_difference, f"Unexpected keys found: {key_difference}" assert expected_gha_output["released"] == action_outputs["released"] assert expected_gha_output["version"] == action_outputs["version"] assert expected_gha_output["tag"] == action_outputs["tag"] assert expected_gha_output["is_prerelease"] == action_outputs["is_prerelease"] assert expected_gha_output["link"] == action_outputs["link"] assert expected_gha_output["previous_version"] == action_outputs["previous_version"] assert expected_gha_output["commit_sha"] == action_outputs["commit_sha"] assert expected_gha_output["release_notes"] == action_outputs["release_notes"] python-semantic-release-10.4.1/tests/e2e/cmd_version/test_version_print.py000066400000000000000000001230651506116242600270060ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING, cast import pytest from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.hvcs.github import Github from tests.const import ( MAIN_PROG_NAME, VERSION_SUBCMD, ) from tests.fixtures.commit_parsers import ( conventional_minor_commits, default_conventional_parser, ) from tests.fixtures.git_repo import get_commit_def_of_conventional_commit from tests.fixtures.repos import ( repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format, repo_w_no_tags_conventional_commits, repo_w_trunk_only_conventional_commits, repo_w_trunk_only_conventional_commits_using_tag_format, ) from tests.fixtures.repos.trunk_based_dev.repo_w_no_tags import ( repo_w_no_tags_conventional_commits_using_tag_format, repo_w_no_tags_conventional_commits_w_zero_version, ) from tests.util import ( 
add_text_to_file, assert_exit_code, assert_successful_exit_code, ) if TYPE_CHECKING: from unittest.mock import MagicMock from requests_mock import Mocker from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.token import ParseResult from tests.conftest import RunCliFn from tests.e2e.conftest import StripLoggingMessagesFn from tests.fixtures.git_repo import ( BuiltRepoResult, GetCfgValueFromDefFn, GetCommitDefFn, GetVersionsFromRepoBuildDefFn, SimulateChangeCommitsNReturnChangelogEntryFn, ) @pytest.mark.parametrize( "repo_result, commits, force_args, next_release_version", [ ( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), lazy_fixture(conventional_minor_commits.__name__), cli_args, next_release_version, ) for cli_args, next_release_version in ( # Dynamic version bump determination (based on commits) ([], "0.2.0"), # Dynamic version bump determination (based on commits) with build metadata (["--build-metadata", "build.12345"], "0.2.0+build.12345"), # Forced version bump (["--prerelease"], "0.1.1-rc.1"), (["--patch"], "0.1.2"), (["--minor"], "0.2.0"), (["--major"], "1.0.0"), # Forced version bump with --build-metadata (["--patch", "--build-metadata", "build.12345"], "0.1.2+build.12345"), # Forced version bump with --as-prerelease (["--prerelease", "--as-prerelease"], "0.1.1-rc.1"), (["--patch", "--as-prerelease"], "0.1.2-rc.1"), (["--minor", "--as-prerelease"], "0.2.0-rc.1"), (["--major", "--as-prerelease"], "1.0.0-rc.1"), # Forced version bump with --as-prerelease and modified --prerelease-token ( ["--patch", "--as-prerelease", "--prerelease-token", "beta"], "0.1.2-beta.1", ), # Forced version bump with --as-prerelease and modified --prerelease-token # and --build-metadata ( [ "--patch", "--as-prerelease", "--prerelease-token", "beta", "--build-metadata", "build.12345", ], "0.1.2-beta.1+build.12345", ), ) ], ) def test_version_print_next_version( repo_result: BuiltRepoResult, commits: 
list[str], force_args: list[str], next_release_version: str, file_in_repo: str, run_cli: RunCliFn, mocked_git_push: MagicMock, post_mocker: Mocker, ): """ Given a generic repository at the latest release version and a subsequent commit, When running the version command with the --print flag, Then the expected next version should be printed and exit without making any changes to the repository. Note: The point of this test is to only verify that the `--print` flag does not make any changes to the repository--not to validate if the next version is calculated correctly per the repository structure (see test_version_release & test_version_force_level for correctness). However, we do validate that --print & a force option and/or --as-prerelease options work together to print the next version correctly but not make a change to the repo. """ repo = repo_result["repo"] # Make a commit to ensure we have something to release # otherwise the "no release will be made" logic will kick in first add_text_to_file(repo, file_in_repo) repo.git.commit(m=commits[-1], a=True) # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print", *force_args] result = run_cli(cli_cmd[1:], env={Github.DEFAULT_ENV_TOKEN_NAME: "1234"}) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit.hexsha tags_after = {tag.name for tag in repo.tags} tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) # Evaluate assert_successful_exit_code(result, cli_cmd) assert f"{next_release_version}\n" == result.stdout # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_before == head_after assert not tags_set_difference assert 
mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 @pytest.mark.parametrize( "repo_result, commits, force_args, next_release_version", [ *[ pytest.param( lazy_fixture(repo_fixture_name), lazy_fixture(conventional_minor_commits.__name__), cli_args, next_release_version, marks=marks if marks else [], ) for repo_fixture_name, marks in ( (repo_w_trunk_only_conventional_commits.__name__, None), ( repo_w_trunk_only_conventional_commits_using_tag_format.__name__, pytest.mark.comprehensive, ), ) for cli_args, next_release_version in ( # Dynamic version bump determination (based on commits) ([], "0.2.0"), # Dynamic version bump determination (based on commits) with build metadata (["--build-metadata", "build.12345"], "0.2.0+build.12345"), # Forced version bump (["--prerelease"], "0.1.1-rc.1"), (["--patch"], "0.1.2"), (["--minor"], "0.2.0"), (["--major"], "1.0.0"), # Forced version bump with --build-metadata (["--patch", "--build-metadata", "build.12345"], "0.1.2+build.12345"), # Forced version bump with --as-prerelease (["--prerelease", "--as-prerelease"], "0.1.1-rc.1"), (["--patch", "--as-prerelease"], "0.1.2-rc.1"), (["--minor", "--as-prerelease"], "0.2.0-rc.1"), (["--major", "--as-prerelease"], "1.0.0-rc.1"), # Forced version bump with --as-prerelease and modified --prerelease-token ( ["--patch", "--as-prerelease", "--prerelease-token", "beta"], "0.1.2-beta.1", ), # Forced version bump with --as-prerelease and modified --prerelease-token # and --build-metadata ( [ "--patch", "--as-prerelease", "--prerelease-token", "beta", "--build-metadata", "build.12345", ], "0.1.2-beta.1+build.12345", ), ) ], *[ pytest.param( lazy_fixture(repo_fixture_name), [], cli_args, next_release_version, marks=pytest.mark.comprehensive, ) for repo_fixture_name in ( repo_w_no_tags_conventional_commits_w_zero_version.__name__, ) for cli_args, next_release_version in ( # Dynamic version bump determination (based on commits) ([], "0.1.0"), # Dynamic version bump determination (based 
on commits) with build metadata (["--build-metadata", "build.12345"], "0.1.0+build.12345"), # Forced version bump (["--prerelease"], "0.0.0-rc.1"), (["--patch"], "0.0.1"), (["--minor"], "0.1.0"), (["--major"], "1.0.0"), # Forced version bump with --build-metadata (["--patch", "--build-metadata", "build.12345"], "0.0.1+build.12345"), # Forced version bump with --as-prerelease (["--prerelease", "--as-prerelease"], "0.0.0-rc.1"), (["--patch", "--as-prerelease"], "0.0.1-rc.1"), (["--minor", "--as-prerelease"], "0.1.0-rc.1"), (["--major", "--as-prerelease"], "1.0.0-rc.1"), # Forced version bump with --as-prerelease and modified --prerelease-token ( ["--patch", "--as-prerelease", "--prerelease-token", "beta"], "0.0.1-beta.1", ), # Forced version bump with --as-prerelease and modified --prerelease-token # and --build-metadata ( [ "--patch", "--as-prerelease", "--prerelease-token", "beta", "--build-metadata", "build.12345", ], "0.0.1-beta.1+build.12345", ), ) ], ], ) def test_version_print_tag_prints_next_tag( repo_result: BuiltRepoResult, commits: list[str], force_args: list[str], next_release_version: str, get_cfg_value_from_def: GetCfgValueFromDefFn, file_in_repo: str, run_cli: RunCliFn, mocked_git_push: MagicMock, post_mocker: Mocker, ): """ Given a generic repository at the latest release version and a subsequent commit, When running the version command with the --print-tag flag, Then the expected next release tag should be printed and exit without making any changes to the repository. Note: The point of this test is to only verify that the `--print-tag` flag does not make any changes to the repository--not to validate if the next version is calculated correctly per the repository structure (see test_version_release & test_version_force_level for correctness). However, we do validate that --print-tag & a force option and/or --as-prerelease options work together to print the next release tag correctly but not make a change to the repo. 
""" repo = repo_result["repo"] repo_def = repo_result["definition"] tag_format_str: str = get_cfg_value_from_def(repo_def, "tag_format_str") # type: ignore[assignment] next_release_tag = tag_format_str.format(version=next_release_version) if len(commits) > 1: # Make a commit to ensure we have something to release # otherwise the "no release will be made" logic will kick in first add_text_to_file(repo, file_in_repo) repo.git.commit(m=commits[-1], a=True) # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print-tag", *force_args] result = run_cli(cli_cmd[1:], env={Github.DEFAULT_ENV_TOKEN_NAME: "1234"}) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit.hexsha tags_after = {tag.name for tag in repo.tags} tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) # Evaluate assert_successful_exit_code(result, cli_cmd) assert f"{next_release_tag}\n" == result.stdout # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_before == head_after assert not tags_set_difference assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 @pytest.mark.parametrize( "repo_result, commits, force_args, next_release_version", [ pytest.param( lazy_fixture(repo_fixture_name), [], cli_args, next_release_version, marks=pytest.mark.comprehensive, ) for repo_fixture_name in ( repo_w_no_tags_conventional_commits.__name__, repo_w_no_tags_conventional_commits_using_tag_format.__name__, ) for cli_args, next_release_version in ( # Dynamic version bump determination (based on commits) ([], "1.0.0"), # Dynamic version bump determination (based on commits) with build metadata (["--build-metadata", 
"build.12345"], "1.0.0+build.12345"), # Forced version bump (["--prerelease"], "0.0.0-rc.1"), (["--patch"], "0.0.1"), (["--minor"], "0.1.0"), (["--major"], "1.0.0"), # Forced version bump with --build-metadata (["--patch", "--build-metadata", "build.12345"], "0.0.1+build.12345"), # Forced version bump with --as-prerelease (["--prerelease", "--as-prerelease"], "0.0.0-rc.1"), (["--patch", "--as-prerelease"], "0.0.1-rc.1"), (["--minor", "--as-prerelease"], "0.1.0-rc.1"), (["--major", "--as-prerelease"], "1.0.0-rc.1"), # Forced version bump with --as-prerelease and modified --prerelease-token ( ["--patch", "--as-prerelease", "--prerelease-token", "beta"], "0.0.1-beta.1", ), # Forced version bump with --as-prerelease and modified --prerelease-token # and --build-metadata ( [ "--patch", "--as-prerelease", "--prerelease-token", "beta", "--build-metadata", "build.12345", ], "0.0.1-beta.1+build.12345", ), ) ], ) def test_version_print_tag_prints_next_tag_no_zero_versions( repo_result: BuiltRepoResult, commits: list[str], force_args: list[str], next_release_version: str, get_cfg_value_from_def: GetCfgValueFromDefFn, file_in_repo: str, run_cli: RunCliFn, mocked_git_push: MagicMock, post_mocker: Mocker, ): """ Given a generic repository at the latest release version and a subsequent commit, When running the version command with the --print-tag flag, Then the expected next release tag should be printed and exit without making any changes to the repository. Note: The point of this test is to only verify that the `--print-tag` flag does not make any changes to the repository--not to validate if the next version is calculated correctly per the repository structure (see test_version_release & test_version_force_level for correctness). However, we do validate that --print-tag & a force option and/or --as-prerelease options work together to print the next release tag correctly but not make a change to the repo. 
""" repo = repo_result["repo"] repo_def = repo_result["definition"] tag_format_str: str = get_cfg_value_from_def(repo_def, "tag_format_str") # type: ignore[assignment] next_release_tag = tag_format_str.format(version=next_release_version) if len(commits) > 1: # Make a commit to ensure we have something to release # otherwise the "no release will be made" logic will kick in first add_text_to_file(repo, file_in_repo) repo.git.commit(m=commits[-1], a=True) # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print-tag", *force_args] result = run_cli(cli_cmd[1:], env={Github.DEFAULT_ENV_TOKEN_NAME: "1234"}) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit.hexsha tags_after = {tag.name for tag in repo.tags} tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) # Evaluate assert_successful_exit_code(result, cli_cmd) assert f"{next_release_tag}\n" == result.stdout # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_before == head_after assert not tags_set_difference assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_trunk_only_conventional_commits.__name__)], ) def test_version_print_last_released_prints_version( repo_result: BuiltRepoResult, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, run_cli: RunCliFn, mocked_git_push: MagicMock, post_mocker: Mocker, strip_logging_messages: StripLoggingMessagesFn, ): repo = repo_result["repo"] latest_release_version = get_versions_from_repo_build_def( repo_result["definition"] )[-1] # Setup: take measurement before running the version 
command repo_status_before = repo.git.status(short=True) head_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print-last-released"] result = run_cli(cli_cmd[1:], env={Github.DEFAULT_ENV_TOKEN_NAME: "1234"}) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit.hexsha tags_after = {tag.name for tag in repo.tags} tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) # Evaluate assert_successful_exit_code(result, cli_cmd) assert not strip_logging_messages(result.stderr) assert f"{latest_release_version}\n" == result.stdout # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_before == head_after assert not tags_set_difference assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 @pytest.mark.parametrize( "repo_result, commits", [ ( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), lazy_fixture(conventional_minor_commits.__name__), ) ], ) def test_version_print_last_released_prints_released_if_commits( repo_result: BuiltRepoResult, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, commits: list[str], run_cli: RunCliFn, mocked_git_push: MagicMock, post_mocker: Mocker, file_in_repo: str, strip_logging_messages: StripLoggingMessagesFn, ): repo = repo_result["repo"] latest_release_version = get_versions_from_repo_build_def( repo_result["definition"] )[-1] # Make a commit so the head is not on the last release add_text_to_file(repo, file_in_repo) repo.git.commit(m=commits[0], a=True) # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print-last-released"] 
result = run_cli(cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit.hexsha tags_after = {tag.name for tag in repo.tags} tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) # Evaluate assert_successful_exit_code(result, cli_cmd) assert not strip_logging_messages(result.stderr) assert f"{latest_release_version}\n" == result.stdout # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_before == head_after assert not tags_set_difference assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_no_tags_conventional_commits.__name__)], ) def test_version_print_last_released_prints_nothing_if_no_tags( repo_result: BuiltRepoResult, run_cli: RunCliFn, mocked_git_push: MagicMock, post_mocker: Mocker, caplog: pytest.LogCaptureFixture, ): repo = repo_result["repo"] # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print-last-released"] result = run_cli(cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) # Evaluate (no release actions should have occurred on print) assert_successful_exit_code(result, cli_cmd) assert result.stdout == "" # must use capture log to see this, because we use the logger to print this message # not click's output assert "No release tags found." 
in caplog.text # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_sha_before == head_after.hexsha # No commit has been made assert not tags_set_difference # No tag created assert mocked_git_push.call_count == 0 # no git push of tag or commit assert post_mocker.call_count == 0 # no vcs release @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_trunk_only_conventional_commits.__name__)], ) def test_version_print_last_released_on_detached_head( repo_result: BuiltRepoResult, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, run_cli: RunCliFn, mocked_git_push: MagicMock, post_mocker: Mocker, strip_logging_messages: StripLoggingMessagesFn, ): repo = repo_result["repo"] latest_release_version = get_versions_from_repo_build_def( repo_result["definition"] )[-1] # Setup: put the repo in a detached head state repo.git.checkout("HEAD", detach=True) # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print-last-released"] result = run_cli(cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) # Evaluate (expected -> actual) assert_successful_exit_code(result, cli_cmd) assert not strip_logging_messages(result.stderr) assert f"{latest_release_version}\n" == result.stdout # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_sha_before == head_after.hexsha # No commit has been made assert not tags_set_difference # No tag created assert 
mocked_git_push.call_count == 0 # no git push of tag or commit assert post_mocker.call_count == 0 # no vcs release @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_trunk_only_conventional_commits.__name__)], ) def test_version_print_last_released_on_nonrelease_branch( repo_result: BuiltRepoResult, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, run_cli: RunCliFn, mocked_git_push: MagicMock, post_mocker: Mocker, strip_logging_messages: StripLoggingMessagesFn, ): repo = repo_result["repo"] latest_release_version = get_versions_from_repo_build_def( repo_result["definition"] )[-1] # Setup: put the repo on a non-release branch repo.create_head("next").checkout() # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print-last-released"] result = run_cli(cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) # Evaluate (expected -> actual) assert_successful_exit_code(result, cli_cmd) assert not strip_logging_messages(result.stderr) assert f"{latest_release_version}\n" == result.stdout # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_sha_before == head_after.hexsha # No commit has been made assert not tags_set_difference # No tag created assert mocked_git_push.call_count == 0 # no git push of tag or commit assert post_mocker.call_count == 0 # no vcs release @pytest.mark.parametrize( "repo_result", [ lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), pytest.param( lazy_fixture( 
repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format.__name__ ), marks=pytest.mark.comprehensive, ), ], ) def test_version_print_last_released_tag_prints_correct_tag( repo_result: BuiltRepoResult, get_cfg_value_from_def: GetCfgValueFromDefFn, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, run_cli: RunCliFn, mocked_git_push: MagicMock, post_mocker: Mocker, strip_logging_messages: StripLoggingMessagesFn, ): repo = repo_result["repo"] repo_def = repo_result["definition"] tag_format_str: str = get_cfg_value_from_def(repo_def, "tag_format_str") # type: ignore[assignment] latest_release_version = get_versions_from_repo_build_def(repo_def)[-1] latest_release_tag = tag_format_str.format(version=latest_release_version) # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print-last-released-tag"] result = run_cli(cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit.hexsha tags_after = {tag.name for tag in repo.tags} tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) # Evaluate assert_successful_exit_code(result, cli_cmd) assert not strip_logging_messages(result.stderr) assert f"{latest_release_tag}\n" == result.stdout # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_before == head_after assert not tags_set_difference assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 @pytest.mark.parametrize( "repo_result, commits", [ ( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), lazy_fixture(conventional_minor_commits.__name__), ), pytest.param( lazy_fixture( 
repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format.__name__ ), lazy_fixture(conventional_minor_commits.__name__), marks=pytest.mark.comprehensive, ), ], ) def test_version_print_last_released_tag_prints_released_if_commits( repo_result: BuiltRepoResult, get_cfg_value_from_def: GetCfgValueFromDefFn, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, commits: list[str], run_cli: RunCliFn, mocked_git_push: MagicMock, post_mocker: Mocker, file_in_repo: str, strip_logging_messages: StripLoggingMessagesFn, ): repo = repo_result["repo"] repo_def = repo_result["definition"] tag_format_str: str = get_cfg_value_from_def(repo_def, "tag_format_str") # type: ignore[assignment] latest_release_version = get_versions_from_repo_build_def(repo_def)[-1] latest_release_tag = tag_format_str.format(version=latest_release_version) # Make a commit so the head is not on the last release add_text_to_file(repo, file_in_repo) repo.git.commit(m=commits[0], a=True) # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print-last-released-tag"] result = run_cli(cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit.hexsha tags_after = {tag.name for tag in repo.tags} tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) # Evaluate assert_successful_exit_code(result, cli_cmd) assert not strip_logging_messages(result.stderr) assert f"{latest_release_tag}\n" == result.stdout # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_before == head_after assert not tags_set_difference assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 
@pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_no_tags_conventional_commits.__name__)], ) def test_version_print_last_released_tag_prints_nothing_if_no_tags( repo_result: BuiltRepoResult, run_cli: RunCliFn, mocked_git_push: MagicMock, post_mocker: Mocker, caplog: pytest.LogCaptureFixture, ): repo = repo_result["repo"] # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print-last-released-tag"] result = run_cli(cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) # Evaluate (no release actions should have occurred on print) assert_successful_exit_code(result, cli_cmd) assert result.stdout == "" # must use capture log to see this, because we use the logger to print this message # not click's output assert "No release tags found." 
in caplog.text # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_sha_before == head_after.hexsha # No commit has been made assert not tags_set_difference # No tag created assert mocked_git_push.call_count == 0 # no git push of tag or commit assert post_mocker.call_count == 0 # no vcs release @pytest.mark.parametrize( "repo_result", [ lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), pytest.param( lazy_fixture( repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format.__name__ ), marks=pytest.mark.comprehensive, ), ], ) def test_version_print_last_released_tag_on_detached_head( repo_result: BuiltRepoResult, get_cfg_value_from_def: GetCfgValueFromDefFn, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, run_cli: RunCliFn, mocked_git_push: MagicMock, post_mocker: Mocker, strip_logging_messages: StripLoggingMessagesFn, ): repo = repo_result["repo"] repo_def = repo_result["definition"] tag_format_str: str = get_cfg_value_from_def(repo_def, "tag_format_str") # type: ignore[assignment] latest_release_version = get_versions_from_repo_build_def(repo_def)[-1] latest_release_tag = tag_format_str.format(version=latest_release_version) # Setup: put the repo in a detached head state repo.git.checkout("HEAD", detach=True) # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print-last-released-tag"] result = run_cli(cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) # Evaluate (expected -> actual) 
assert_successful_exit_code(result, cli_cmd) assert not strip_logging_messages(result.stderr) assert f"{latest_release_tag}\n" == result.stdout # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_sha_before == head_after.hexsha # No commit has been made assert not tags_set_difference # No tag created assert mocked_git_push.call_count == 0 # no git push of tag or commit assert post_mocker.call_count == 0 # no vcs release @pytest.mark.parametrize( "repo_result", [ lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), pytest.param( lazy_fixture( repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format.__name__ ), marks=pytest.mark.comprehensive, ), ], ) def test_version_print_last_released_tag_on_nonrelease_branch( repo_result: BuiltRepoResult, get_cfg_value_from_def: GetCfgValueFromDefFn, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, run_cli: RunCliFn, mocked_git_push: MagicMock, post_mocker: Mocker, strip_logging_messages: StripLoggingMessagesFn, ): repo = repo_result["repo"] repo_def = repo_result["definition"] tag_format_str: str = get_cfg_value_from_def(repo_def, "tag_format_str") # type: ignore[assignment] latest_release_version = get_versions_from_repo_build_def(repo_def)[-1] last_release_tag = tag_format_str.format(version=latest_release_version) # Setup: put the repo on a non-release branch repo.create_head("next").checkout() # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print-last-released-tag"] result = run_cli(cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} 
tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) # Evaluate (expected -> actual) assert_successful_exit_code(result, cli_cmd) assert not strip_logging_messages(result.stderr) assert f"{last_release_tag}\n" == result.stdout # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_sha_before == head_after.hexsha assert not tags_set_difference assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 @pytest.mark.parametrize( "repo_result, get_commit_def_fn, default_parser", [ ( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), lazy_fixture(get_commit_def_of_conventional_commit.__name__), lazy_fixture(default_conventional_parser.__name__), ) ], ) def test_version_print_next_version_fails_on_detached_head( repo_result: BuiltRepoResult, run_cli: RunCliFn, simulate_change_commits_n_rtn_changelog_entry: SimulateChangeCommitsNReturnChangelogEntryFn, get_commit_def_fn: GetCommitDefFn[CommitParser[ParseResult, ParserOptions]], default_parser: CommitParser[ParseResult, ParserOptions], mocked_git_push: MagicMock, post_mocker: Mocker, strip_logging_messages: StripLoggingMessagesFn, ): repo = repo_result["repo"] expected_error_msg = ( "Detached HEAD state cannot match any release groups; no release will be made" ) # Setup: put the repo in a detached head state repo.git.checkout("HEAD", detach=True) # Setup: make a commit to ensure we have something to release simulate_change_commits_n_rtn_changelog_entry( repo, [get_commit_def_fn("fix: make a patch fix to codebase", parser=default_parser)], ) # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print"] result = run_cli(cli_cmd[1:]) # take measurement after running the version command 
repo_status_after = repo.git.status(short=True) head_after = repo.head.commit.hexsha tags_after = {tag.name for tag in repo.tags} tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) # Evaluate (expected -> actual) assert_exit_code(1, result, cli_cmd) assert not result.stdout assert f"{expected_error_msg}\n" == strip_logging_messages(result.stderr) # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_before == head_after assert not tags_set_difference assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 @pytest.mark.parametrize( "repo_result, get_commit_def_fn, default_parser", [ ( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), lazy_fixture(get_commit_def_of_conventional_commit.__name__), lazy_fixture(default_conventional_parser.__name__), ) ], ) def test_version_print_next_tag_fails_on_detached_head( repo_result: BuiltRepoResult, run_cli: RunCliFn, simulate_change_commits_n_rtn_changelog_entry: SimulateChangeCommitsNReturnChangelogEntryFn, get_commit_def_fn: GetCommitDefFn[CommitParser[ParseResult, ParserOptions]], default_parser: CommitParser[ParseResult, ParserOptions], mocked_git_push: MagicMock, post_mocker: Mocker, strip_logging_messages: StripLoggingMessagesFn, ): repo = repo_result["repo"] expected_error_msg = ( "Detached HEAD state cannot match any release groups; no release will be made" ) # Setup: put the repo in a detached head state repo.git.checkout("HEAD", detach=True) # Setup: make a commit to ensure we have something to release simulate_change_commits_n_rtn_changelog_entry( repo, [get_commit_def_fn("fix: make a patch fix to codebase", parser=default_parser)], ) # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, 
VERSION_SUBCMD, "--print-tag"] result = run_cli(cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit.hexsha tags_after = {tag.name for tag in repo.tags} tags_set_difference = cast("set[str]", set.difference(tags_after, tags_before)) # Evaluate (expected -> actual) assert_exit_code(1, result, cli_cmd) assert not result.stdout assert f"{expected_error_msg}\n" == strip_logging_messages(result.stderr) # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_before == head_after assert not tags_set_difference assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 python-semantic-release-10.4.1/tests/e2e/cmd_version/test_version_release_notes.py000066400000000000000000000135551506116242600305040ustar00rootroot00000000000000from __future__ import annotations import os from datetime import timezone from typing import TYPE_CHECKING import pytest from freezegun import freeze_time from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.version.version import Version from tests.const import ( EXAMPLE_RELEASE_NOTES_TEMPLATE, MAIN_PROG_NAME, VERSION_SUBCMD, RepoActionStep, ) from tests.fixtures.repos import repo_w_no_tags_conventional_commits from tests.fixtures.repos.trunk_based_dev.repo_w_no_tags import ( repo_w_no_tags_emoji_commits, repo_w_no_tags_scipy_commits, ) from tests.util import assert_successful_exit_code, get_release_history_from_context if TYPE_CHECKING: from unittest.mock import MagicMock from requests_mock import Mocker from tests.conftest import GetStableDateNowFn, RunCliFn from tests.e2e.conftest import ( RetrieveRuntimeContextFn, ) from tests.fixtures.example_project import ( UpdatePyprojectTomlFn, UseReleaseNotesTemplateFn, ) from tests.fixtures.git_repo import ( BuiltRepoResult, GenerateDefaultReleaseNotesFromDefFn, 
GetHvcsClientFromRepoDefFn, ) @pytest.mark.parametrize( "repo_result, next_release_version", [ (lazy_fixture(repo_w_no_tags_conventional_commits.__name__), "1.0.0"), ], ) def test_custom_release_notes_template( repo_result: BuiltRepoResult, next_release_version: str, run_cli: RunCliFn, use_release_notes_template: UseReleaseNotesTemplateFn, retrieve_runtime_context: RetrieveRuntimeContextFn, mocked_git_push: MagicMock, post_mocker: Mocker, ) -> None: """Verify the template `.release_notes.md.j2` from `template_dir` is used.""" release_version = Version.parse(next_release_version) # Setup use_release_notes_template() runtime_context = retrieve_runtime_context(repo_result["repo"]) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--vcs-release"] result = run_cli(cli_cmd[1:]) # Must run this after the action because the release history object should be pulled from the # repository after a tag is created release_history = get_release_history_from_context(runtime_context) release = release_history.released[release_version] expected_release_notes = ( runtime_context.template_environment.from_string(EXAMPLE_RELEASE_NOTES_TEMPLATE) .render(release=release) .rstrip() + os.linesep ) # ensure normalized line endings after render expected_release_notes = str.join( os.linesep, str.split(expected_release_notes.replace("\r", ""), "\n"), ) # Assert assert_successful_exit_code(result, cli_cmd) assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 assert post_mocker.last_request is not None actual_notes = post_mocker.last_request.json()["body"] assert expected_release_notes == actual_notes @pytest.mark.parametrize( "repo_result, license_name, license_setting, mask_initial_release", [ pytest.param( lazy_fixture(repo_fixture_name), license_name, license_setting, mask_initial_release, marks=pytest.mark.comprehensive, ) for mask_initial_release in [True, False] for license_name in ["", "MIT", "GPL-3.0"] for license_setting in [ 
"project.license-expression", "project.license", # deprecated "project.license.text", # deprecated ] for repo_fixture_name in [ repo_w_no_tags_conventional_commits.__name__, repo_w_no_tags_emoji_commits.__name__, repo_w_no_tags_scipy_commits.__name__, ] ], ) def test_default_release_notes_license_statement( repo_result: BuiltRepoResult, run_cli: RunCliFn, license_name: str, license_setting: str, mask_initial_release: bool, update_pyproject_toml: UpdatePyprojectTomlFn, mocked_git_push: MagicMock, post_mocker: Mocker, stable_now_date: GetStableDateNowFn, get_hvcs_client_from_repo_def: GetHvcsClientFromRepoDefFn, generate_default_release_notes_from_def: GenerateDefaultReleaseNotesFromDefFn, ): new_version = "1.0.0" # Setup now_datetime = stable_now_date() repo_def = list(repo_result["definition"]) repo_def.append( { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": now_datetime.isoformat(timespec="seconds"), }, } ) # Setup: Overwrite the default setting (defined in test.const) update_pyproject_toml("project.license-expression", None) # Setup: set the license for the test update_pyproject_toml(license_setting, license_name) # Setup: set mask_initial_release value in configuration update_pyproject_toml( "tool.semantic_release.changelog.default_templates.mask_initial_release", mask_initial_release, ) expected_release_notes = generate_default_release_notes_from_def( version_actions=repo_def, hvcs=get_hvcs_client_from_repo_def(repo_def), previous_version=None, license_name=license_name, mask_initial_release=mask_initial_release, ) # Act with freeze_time(now_datetime.astimezone(timezone.utc)): cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--no-changelog", "--vcs-release"] result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 assert post_mocker.last_request is not None request_body = post_mocker.last_request.json() 
assert "body" in request_body actual_notes = request_body["body"] assert expected_release_notes == actual_notes python-semantic-release-10.4.1/tests/e2e/cmd_version/test_version_stamp.py000066400000000000000000000374571506116242600270070ustar00rootroot00000000000000from __future__ import annotations import json from pathlib import Path from textwrap import dedent from typing import TYPE_CHECKING, cast import pytest import tomlkit import yaml from dotty_dict import Dotty from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.version.declarations.enum import VersionStampType from tests.const import EXAMPLE_PROJECT_NAME, MAIN_PROG_NAME, VERSION_SUBCMD from tests.fixtures.repos.trunk_based_dev.repo_w_no_tags import ( repo_w_no_tags_conventional_commits, ) from tests.fixtures.repos.trunk_based_dev.repo_w_prereleases import ( repo_w_trunk_only_n_prereleases_conventional_commits, ) from tests.util import ( assert_successful_exit_code, dynamic_python_import, ) if TYPE_CHECKING: from unittest.mock import MagicMock from tests.conftest import RunCliFn from tests.fixtures.example_project import ExProjectDir, UpdatePyprojectTomlFn from tests.fixtures.git_repo import BuiltRepoResult VERSION_STAMP_CMD = [ MAIN_PROG_NAME, VERSION_SUBCMD, "--no-commit", "--no-tag", "--skip-build", "--no-changelog", ] """Using the version command, prevent any action besides stamping the version""" @pytest.mark.parametrize( "repo_result, expected_new_version", [ ( lazy_fixture(repo_w_trunk_only_n_prereleases_conventional_commits.__name__), "0.3.0", ) ], ) def test_version_only_stamp_version( repo_result: BuiltRepoResult, expected_new_version: str, run_cli: RunCliFn, mocked_git_push: MagicMock, post_mocker: MagicMock, example_pyproject_toml: Path, example_project_dir: ExProjectDir, pyproject_toml_file: Path, ) -> None: repo = repo_result["repo"] version_file = example_project_dir.joinpath( "src", EXAMPLE_PROJECT_NAME, "_version.py" ) expected_changed_files = sorted( [ 
str(pyproject_toml_file), str(version_file.relative_to(example_project_dir)), ] ) # Setup: take measurement before running the version command head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} version_py_before = dynamic_python_import( version_file, f"{EXAMPLE_PROJECT_NAME}._version" ).__version__ pyproject_toml_before = tomlkit.loads( example_pyproject_toml.read_text(encoding="utf-8") ) # Modify the pyproject.toml to remove the version so we can compare it later pyproject_toml_before.get("tool", {}).get("poetry", {}).pop("version") # Act (stamp the version but also create the changelog) cli_cmd = [*VERSION_STAMP_CMD, "--minor"] result = run_cli(cli_cmd[1:]) # take measurement after running the version command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) actual_staged_files = [ # Make sure filepath uses os specific path separators str(Path(file)) # Changed files should always be staged for file in cast("str", repo.git.diff(staged=True, name_only=True)).splitlines() ] pyproject_toml_after = tomlkit.loads( example_pyproject_toml.read_text(encoding="utf-8") ) pyproj_version_after = ( pyproject_toml_after.get("tool", {}).get("poetry", {}).pop("version") ) # Load python module for reading the version (ensures the file is valid) version_py_after = dynamic_python_import( version_file, f"{EXAMPLE_PROJECT_NAME}._version" ).__version__ # Evaluate (no release actions should be taken but version should be stamped from forced minor bump) assert_successful_exit_code(result, cli_cmd) assert head_sha_before == head_after.hexsha # No commit should be made assert not tags_set_difference # No tag should be created # no push as it should be turned off automatically assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 # no vcs release creation occurred # Files that should receive version change assert expected_changed_files == 
actual_staged_files # Compare pyproject.toml assert pyproject_toml_before == pyproject_toml_after assert expected_new_version == pyproj_version_after # Compare _version.py assert expected_new_version == version_py_after assert version_py_before != version_py_after # ============================================================================== # # VERSION STAMP DIFFERENT CONTENT TYPES # # ============================================================================== # @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_stamp_version_variables_python( run_cli: RunCliFn, update_pyproject_toml: UpdatePyprojectTomlFn, example_project_dir: ExProjectDir, ) -> None: new_version = "1.0.0" target_file = example_project_dir.joinpath( "src", EXAMPLE_PROJECT_NAME, "_version.py" ) # Set configuration to modify the python file update_pyproject_toml( "tool.semantic_release.version_variables", [f"{target_file.relative_to(example_project_dir)}:__version__"], ) # Act cli_cmd = VERSION_STAMP_CMD result = run_cli(cli_cmd[1:]) # Check the result assert_successful_exit_code(result, cli_cmd) # Load python module for reading the version (ensures the file is valid) version_py_after = dynamic_python_import( target_file, f"{EXAMPLE_PROJECT_NAME}._version" ).__version__ # Check the version was updated assert new_version == version_py_after @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_stamp_version_toml( run_cli: RunCliFn, update_pyproject_toml: UpdatePyprojectTomlFn, default_tag_format_str: str, ) -> None: orig_version = "0.0.0" new_version = "1.0.0" orig_release = default_tag_format_str.format(version=orig_version) new_release = default_tag_format_str.format(version=new_version) target_file = Path("example.toml") orig_toml = dedent( f"""\ [package] name = "example" version = "{orig_version}" release = "{orig_release}" date-released = "1970-01-01" """ ) orig_toml_obj = Dotty(tomlkit.parse(orig_toml)) # Write initial text in file 
target_file.write_text(orig_toml) # Set configuration to modify the yaml file update_pyproject_toml( "tool.semantic_release.version_toml", [ f"{target_file}:package.version:{VersionStampType.NUMBER_FORMAT.value}", f"{target_file}:package.release:{VersionStampType.TAG_FORMAT.value}", ], ) # Act cli_cmd = VERSION_STAMP_CMD result = run_cli(cli_cmd[1:]) # Check the result assert_successful_exit_code(result, cli_cmd) # Read content resulting_toml_obj = Dotty(tomlkit.parse(target_file.read_text())) # Check the version was updated assert new_version == resulting_toml_obj["package.version"] assert new_release == resulting_toml_obj["package.release"] # Check the rest of the content is the same (by resetting the version & comparing) resulting_toml_obj["package.version"] = orig_version resulting_toml_obj["package.release"] = orig_release assert orig_toml_obj == resulting_toml_obj @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_stamp_version_variables_yaml( run_cli: RunCliFn, update_pyproject_toml: UpdatePyprojectTomlFn, ) -> None: orig_version = "0.0.0" new_version = "1.0.0" target_file = Path("example.yml") orig_yaml = dedent( f"""\ --- package: example version: {orig_version} date-released: 1970-01-01 """ ) # Write initial text in file target_file.write_text(orig_yaml) # Set configuration to modify the yaml file update_pyproject_toml( "tool.semantic_release.version_variables", [f"{target_file}:version"] ) # Act cli_cmd = VERSION_STAMP_CMD result = run_cli(cli_cmd[1:]) # Check the result assert_successful_exit_code(result, cli_cmd) # Read content resulting_yaml_obj = yaml.safe_load(target_file.read_text()) # Check the version was updated assert new_version == resulting_yaml_obj["version"] # Check the rest of the content is the same (by resetting the version & comparing) resulting_yaml_obj["version"] = orig_version assert yaml.safe_load(orig_yaml) == resulting_yaml_obj @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def 
test_stamp_version_variables_yaml_cff( run_cli: RunCliFn, update_pyproject_toml: UpdatePyprojectTomlFn, ) -> None: """ Given a yaml file with a top level version directive, When the version command is run, Then the version is updated in the file and the rest of the content is unchanged & parsable Based on https://github.com/python-semantic-release/python-semantic-release/issues/962 """ orig_version = "0.0.0" new_version = "1.0.0" target_file = Path("CITATION.cff") orig_yaml = dedent( f"""\ --- cff-version: 1.2.0 message: "If you use this software, please cite it as below." authors: - family-names: Doe given-names: Jon orcid: https://orcid.org/1234-6666-2222-5555 title: "My Research Software" version: {orig_version} date-released: 1970-01-01 """ ) # Write initial text in file target_file.write_text(orig_yaml) # Set configuration to modify the yaml file update_pyproject_toml( "tool.semantic_release.version_variables", [f"{target_file}:version"] ) # Act cli_cmd = VERSION_STAMP_CMD result = run_cli(cli_cmd[1:]) # Check the result assert_successful_exit_code(result, cli_cmd) # Read content resulting_yaml_obj = yaml.safe_load(target_file.read_text()) # Check the version was updated assert new_version == resulting_yaml_obj["version"] # Check the rest of the content is the same (by resetting the version & comparing) resulting_yaml_obj["version"] = orig_version assert yaml.safe_load(orig_yaml) == resulting_yaml_obj @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_stamp_version_variables_json( run_cli: RunCliFn, update_pyproject_toml: UpdatePyprojectTomlFn, ) -> None: orig_version = "0.0.0" new_version = "1.0.0" target_file = Path("plugins.json") orig_json = { "id": "test-plugin", "version": orig_version, "meta": { "description": "Test plugin", }, } # Write initial text in file target_file.write_text(json.dumps(orig_json, indent=4)) # Set configuration to modify the json file update_pyproject_toml( "tool.semantic_release.version_variables", 
[f"{target_file}:version"] ) # Act cli_cmd = VERSION_STAMP_CMD result = run_cli(cli_cmd[1:]) # Check the result assert_successful_exit_code(result, cli_cmd) # Read content resulting_json_obj = json.loads(target_file.read_text()) # Check the version was updated assert new_version == resulting_json_obj["version"] # Check the rest of the content is the same (by resetting the version & comparing) resulting_json_obj["version"] = orig_version assert orig_json == resulting_json_obj @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_stamp_version_variables_yaml_github_actions( run_cli: RunCliFn, update_pyproject_toml: UpdatePyprojectTomlFn, default_tag_format_str: str, ) -> None: """ Given a yaml file with github actions 'uses:' directives which use @vX.Y.Z version declarations, When a version is stamped and configured to stamp the version using the tag format, Then the file is updated with the new version in the tag format Based on https://github.com/python-semantic-release/python-semantic-release/issues/1156 """ orig_version = "0.0.0" new_version = "1.0.0" target_file = Path("combined.yml") action1_yaml_filepath = "my-org/my-actions/.github/workflows/action1.yml" action2_yaml_filepath = "my-org/my-actions/.github/workflows/action2.yml" orig_yaml = dedent( f"""\ --- on: workflow_call: jobs: action1: uses: {action1_yaml_filepath}@{default_tag_format_str.format(version=orig_version)} action2: uses: {action2_yaml_filepath}@{default_tag_format_str.format(version=orig_version)} """ ) expected_action1_value = ( f"{action1_yaml_filepath}@{default_tag_format_str.format(version=new_version)}" ) expected_action2_value = ( f"{action2_yaml_filepath}@{default_tag_format_str.format(version=new_version)}" ) # Setup: Write initial text in file target_file.write_text(orig_yaml) # Setup: Set configuration to modify the yaml file update_pyproject_toml( "tool.semantic_release.version_variables", [ 
f"{target_file}:{action1_yaml_filepath}:{VersionStampType.TAG_FORMAT.value}", f"{target_file}:{action2_yaml_filepath}:{VersionStampType.TAG_FORMAT.value}", ], ) # Act cli_cmd = VERSION_STAMP_CMD result = run_cli(cli_cmd[1:]) # Check the result assert_successful_exit_code(result, cli_cmd) # Read content resulting_yaml_obj = yaml.safe_load(target_file.read_text()) # Check the version was updated assert expected_action1_value == resulting_yaml_obj["jobs"]["action1"]["uses"] assert expected_action2_value == resulting_yaml_obj["jobs"]["action2"]["uses"] # Check the rest of the content is the same (by setting the version & comparing) original_yaml_obj = yaml.safe_load(orig_yaml) original_yaml_obj["jobs"]["action1"]["uses"] = expected_action1_value original_yaml_obj["jobs"]["action2"]["uses"] = expected_action2_value assert original_yaml_obj == resulting_yaml_obj @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_stamp_version_variables_yaml_kustomization_container_spec( run_cli: RunCliFn, update_pyproject_toml: UpdatePyprojectTomlFn, default_tag_format_str: str, ) -> None: """ Given a yaml file with directives that expect a vX.Y.Z version tag declarations, When a version is stamped and configured to stamp the version using the tag format, Then the file is updated with the new version in the tag format Based on https://github.com/python-semantic-release/python-semantic-release/issues/846 """ orig_version = "0.0.0" new_version = "1.0.0" target_file = Path("kustomization.yaml") orig_yaml = dedent( f"""\ images: - name: repo/image newTag: {default_tag_format_str.format(version=orig_version)} """ ) expected_new_tag_value = default_tag_format_str.format(version=new_version) # Setup: Write initial text in file target_file.write_text(orig_yaml) # Setup: Set configuration to modify the yaml file update_pyproject_toml( "tool.semantic_release.version_variables", [ f"{target_file}:newTag:{VersionStampType.TAG_FORMAT.value}", ], ) # Act cli_cmd = 
VERSION_STAMP_CMD result = run_cli(cli_cmd[1:]) # Check the result assert_successful_exit_code(result, cli_cmd) # Read content resulting_yaml_obj = yaml.safe_load(target_file.read_text()) # Check the version was updated assert expected_new_tag_value == resulting_yaml_obj["images"][0]["newTag"] # Check the rest of the content is the same (by resetting the version & comparing) original_yaml_obj = yaml.safe_load(orig_yaml) resulting_yaml_obj["images"][0]["newTag"] = original_yaml_obj["images"][0]["newTag"] assert original_yaml_obj == resulting_yaml_obj python-semantic-release-10.4.1/tests/e2e/cmd_version/test_version_strict.py000066400000000000000000000103421506116242600271530ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING import pytest from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.hvcs.github import Github from tests.const import MAIN_PROG_NAME, VERSION_SUBCMD from tests.fixtures.repos import repo_w_trunk_only_conventional_commits from tests.util import assert_exit_code if TYPE_CHECKING: from unittest.mock import MagicMock from requests_mock import Mocker from tests.conftest import RunCliFn from tests.e2e.conftest import StripLoggingMessagesFn from tests.fixtures.git_repo import BuiltRepoResult, GetVersionsFromRepoBuildDefFn @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_trunk_only_conventional_commits.__name__)], ) def test_version_already_released_when_strict( repo_result: BuiltRepoResult, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, run_cli: RunCliFn, mocked_git_push: MagicMock, post_mocker: Mocker, strip_logging_messages: StripLoggingMessagesFn, ): """ Given repo has no new changes since the last release, When running the version command in strict mode, Then no version release should happen, which means no code changes, no build, no commit, no tag, no push, and no vcs release creation while returning an exit code of 2. 
""" repo = repo_result["repo"] latest_release_version = get_versions_from_repo_build_def( repo_result["definition"] )[-1] expected_error_msg = f"[bold orange1]No release will be made, {latest_release_version} has already been released!" # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_before = repo.head.commit.hexsha tags_before = sorted([tag.name for tag in repo.tags]) # Act cli_cmd = [MAIN_PROG_NAME, "--strict", VERSION_SUBCMD] result = run_cli(cli_cmd[1:], env={Github.DEFAULT_ENV_TOKEN_NAME: "1234"}) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit.hexsha tags_after = sorted([tag.name for tag in repo.tags]) # Evaluate assert_exit_code(2, result, cli_cmd) assert f"{latest_release_version}\n" == result.stdout assert f"{expected_error_msg}\n" == strip_logging_messages(result.stderr) # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_before == head_after assert tags_before == tags_after assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_trunk_only_conventional_commits.__name__)] ) def test_version_on_nonrelease_branch_when_strict( repo_result: BuiltRepoResult, run_cli: RunCliFn, mocked_git_push: MagicMock, post_mocker: Mocker, strip_logging_messages: StripLoggingMessagesFn, ): """ Given repo is on a non-release branch, When running the version command in strict mode, Then no version release should happen which means no code changes, no build, no commit, no tag, no push, and no vcs release creation while returning an exit code of 2. 
""" repo = repo_result["repo"] # Setup branch = repo.create_head("next").checkout() expected_error_msg = ( f"branch '{branch.name}' isn't in any release groups; no release will be made\n" ) repo_status_before = repo.git.status(short=True) head_before = repo.head.commit.hexsha tags_before = sorted([tag.name for tag in repo.tags]) # Act cli_cmd = [MAIN_PROG_NAME, "--strict", VERSION_SUBCMD] result = run_cli(cli_cmd[1:]) # Evaluate assert_exit_code(2, result, cli_cmd) assert not result.stdout assert expected_error_msg == strip_logging_messages(result.stderr) # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) tags_after = sorted([tag.name for tag in repo.tags]) assert repo_status_before == repo.git.status(short=True) assert head_before == repo.head.commit.hexsha assert tags_before == tags_after assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 python-semantic-release-10.4.1/tests/e2e/conftest.py000066400000000000000000000164531506116242600223650ustar00rootroot00000000000000from __future__ import annotations import os from pathlib import Path from re import IGNORECASE, MULTILINE, compile as regexp from typing import TYPE_CHECKING from unittest.mock import MagicMock import pytest from requests_mock import ANY from semantic_release.cli import config as cli_config_module from semantic_release.cli.config import ( GlobalCommandLineOptions, RawConfig, RuntimeContext, ) from semantic_release.cli.const import DEFAULT_CONFIG_FILE from semantic_release.cli.util import load_raw_config_file from tests.util import prepare_mocked_git_command_wrapper_type if TYPE_CHECKING: from re import Pattern from typing import Protocol from git.repo import Repo from pytest import MonkeyPatch from requests_mock.mocker import Mocker from tests.fixtures.example_project import ExProjectDir class GetSanitizedChangelogContentFn(Protocol): def __call__( self, repo_dir: Path, changelog_file: Path = ..., remove_insertion_flag: bool = True, ) 
-> str: ... class ReadConfigFileFn(Protocol): """Read the raw config file from `config_path`.""" def __call__(self, file: Path | str) -> RawConfig: ... class RetrieveRuntimeContextFn(Protocol): """Retrieve the runtime context for a repo.""" def __call__(self, repo: Repo) -> RuntimeContext: ... class StripLoggingMessagesFn(Protocol): def __call__(self, log: str) -> str: ... @pytest.hookimpl(tryfirst=True) def pytest_collection_modifyitems(items: list[pytest.Item]) -> None: """Apply the e2e marker to all tests in the end-to-end test directory.""" cli_test_directory = Path(__file__).parent for item in items: if cli_test_directory in item.path.parents: item.add_marker(pytest.mark.e2e) @pytest.fixture def post_mocker(requests_mock: Mocker) -> Mocker: """Patch all POST requests, mocking a response body for VCS release creation.""" requests_mock.register_uri("POST", ANY, json={"id": 999}) return requests_mock @pytest.fixture def mocked_git_push(monkeypatch: MonkeyPatch) -> MagicMock: """Mock the `Repo.git.push()` method in `semantic_release.cli.main`.""" mocked_push = MagicMock() cls = prepare_mocked_git_command_wrapper_type(push=mocked_push) monkeypatch.setattr(cli_config_module.Repo, "GitCommandWrapperType", cls) return mocked_push @pytest.fixture def config_path(example_project_dir: ExProjectDir) -> Path: return example_project_dir / DEFAULT_CONFIG_FILE @pytest.fixture(scope="session") def read_config_file() -> ReadConfigFileFn: def _read_config_file(file: Path | str) -> RawConfig: config_text = load_raw_config_file(file) return RawConfig.model_validate(config_text) return _read_config_file @pytest.fixture def cli_options(config_path: Path) -> GlobalCommandLineOptions: return GlobalCommandLineOptions( noop=False, verbosity=0, strict=False, config_file=str(config_path), ) @pytest.fixture def retrieve_runtime_context( read_config_file: ReadConfigFileFn, cli_options: GlobalCommandLineOptions, ) -> RetrieveRuntimeContextFn: def _retrieve_runtime_context(repo: Repo) -> 
RuntimeContext: cwd = os.getcwd() repo_dir = str(Path(repo.working_dir).resolve()) os.chdir(repo_dir) try: raw_config = read_config_file(cli_options.config_file) return RuntimeContext.from_raw_config(raw_config, cli_options) finally: os.chdir(cwd) return _retrieve_runtime_context @pytest.fixture(scope="session") def strip_logging_messages() -> StripLoggingMessagesFn: """Fixture to strip logging messages from the output.""" # Log levels match SemanticReleaseLogLevel enum values logger_msg_pattern = regexp( r"^\s*(?:\[\d\d:\d\d:\d\d\])?\s*(FATAL|CRITICAL|ERROR|WARNING|INFO|DEBUG|SILLY).*?\n(?:\s+\S.*?\n)*(?!\n[ ]{11})", MULTILINE, ) def _strip_logging_messages(log: str) -> str: # Make sure it ends with a newline return logger_msg_pattern.sub("", log.rstrip("\n") + "\n") return _strip_logging_messages @pytest.fixture(scope="session") def long_hash_pattern() -> Pattern[str]: return regexp(r"\b([0-9a-f]{40})\b", IGNORECASE) @pytest.fixture(scope="session") def short_hash_pattern() -> Pattern[str]: return regexp(r"\b([0-9a-f]{7})\b", IGNORECASE) @pytest.fixture(scope="session") def get_sanitized_rst_changelog_content( changelog_rst_file: Path, default_rst_changelog_insertion_flag: str, long_hash_pattern: Pattern[str], short_hash_pattern: Pattern[str], ) -> GetSanitizedChangelogContentFn: rst_short_hash_link_pattern = regexp(r"(_[0-9a-f]{7})\b", IGNORECASE) def _get_sanitized_rst_changelog_content( repo_dir: Path, changelog_file: Path = changelog_rst_file, remove_insertion_flag: bool = False, ) -> str: if not (changelog_path := repo_dir / changelog_file).exists(): return "" # Note that our repo generation fixture includes the insertion flag automatically # toggle remove_insertion_flag to True to remove the insertion flag, applies to Init mode repos with changelog_path.open(newline=os.linesep) as rfd: # use os.linesep here because the insertion flag is os-specific # but convert the content to universal newlines for comparison changelog_content = ( rfd.read().replace( 
f"{default_rst_changelog_insertion_flag}{os.linesep}", "" ) if remove_insertion_flag else rfd.read() ).replace("\r", "") changelog_content = long_hash_pattern.sub("0" * 40, changelog_content) changelog_content = short_hash_pattern.sub("0" * 7, changelog_content) return rst_short_hash_link_pattern.sub(f'_{"0" * 7}', changelog_content) return _get_sanitized_rst_changelog_content @pytest.fixture(scope="session") def get_sanitized_md_changelog_content( changelog_md_file: Path, default_md_changelog_insertion_flag: str, long_hash_pattern: Pattern[str], short_hash_pattern: Pattern[str], ) -> GetSanitizedChangelogContentFn: def _get_sanitized_md_changelog_content( repo_dir: Path, changelog_file: Path = changelog_md_file, remove_insertion_flag: bool = False, ) -> str: if not (changelog_path := repo_dir / changelog_file).exists(): return "" # Note that our repo generation fixture includes the insertion flag automatically # toggle remove_insertion_flag to True to remove the insertion flag, applies to Init mode repos with changelog_path.open(newline=os.linesep) as rfd: # use os.linesep here because the insertion flag is os-specific # but convert the content to universal newlines for comparison changelog_content = ( rfd.read().replace( f"{default_md_changelog_insertion_flag}{os.linesep}", "" ) if remove_insertion_flag else rfd.read() ).replace("\r", "") changelog_content = long_hash_pattern.sub("0" * 40, changelog_content) return short_hash_pattern.sub("0" * 7, changelog_content) return _get_sanitized_md_changelog_content python-semantic-release-10.4.1/tests/e2e/test_help.py000066400000000000000000000157611506116242600225300ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING import pytest from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.cli.commands.changelog import changelog from semantic_release.cli.commands.generate_config import generate_config from semantic_release.cli.commands.main import 
main from semantic_release.cli.commands.publish import publish from semantic_release.cli.commands.version import version from tests.const import MAIN_PROG_NAME, SUCCESS_EXIT_CODE from tests.fixtures.repos import repo_w_trunk_only_conventional_commits from tests.util import assert_exit_code if TYPE_CHECKING: from click import Command from tests.conftest import RunCliFn from tests.fixtures import UpdatePyprojectTomlFn from tests.fixtures.git_repo import BuiltRepoResult # Define the expected exit code for the help command HELP_EXIT_CODE = SUCCESS_EXIT_CODE @pytest.mark.parametrize( "help_option", ("-h", "--help"), ids=lambda opt: opt.lstrip("-") ) @pytest.mark.parametrize( "command", (main, changelog, generate_config, publish, version), ids=lambda cmd: cmd.name, ) def test_help_no_repo( help_option: str, command: Command, run_cli: RunCliFn, change_to_ex_proj_dir: None, ): """ Test that the help message is displayed even when the current directory is not a git repository and there is not a configuration file available. 
Documented issue #840 """ # Generate some expected output that should be specific per command cmd_usage = str.join( " ", list( filter( None, [ "Usage:", MAIN_PROG_NAME, command.name if command.name != "main" else "", "[OPTIONS]", "" if command.name != main.name else "COMMAND [ARGS]...", ], ) ), ) # Create the arguments list for subcommands unless its main args = list( filter(None, [command.name if command.name != main.name else "", help_option]) ) # Run the command with the help option result = run_cli(args, invoke_kwargs={"prog_name": MAIN_PROG_NAME}) # Evaluate result assert_exit_code(HELP_EXIT_CODE, result, [MAIN_PROG_NAME, *args]) assert cmd_usage in result.output @pytest.mark.parametrize( "help_option", ("-h", "--help"), ids=lambda opt: opt.lstrip("-") ) @pytest.mark.parametrize( "command", (main, changelog, generate_config, publish, version), ids=lambda cmd: cmd.name, ) @pytest.mark.usefixtures(repo_w_trunk_only_conventional_commits.__name__) def test_help_valid_config( help_option: str, command: Command, run_cli: RunCliFn, ): """ Test that the help message is displayed when the current directory is a git repository and there is a valid configuration file available. 
Documented issue #840 """ cmd_usage = str.join( " ", list( filter( None, [ "Usage:", MAIN_PROG_NAME, command.name if command.name != main.name else "", "[OPTIONS]", "" if command.name != main.name else "COMMAND [ARGS]...", ], ) ), ) # Create the arguments list for subcommands unless its main args = list( filter(None, [command.name if command.name != main.name else "", help_option]) ) # Run the command with the help option result = run_cli(args, invoke_kwargs={"prog_name": MAIN_PROG_NAME}) # Evaluate result assert_exit_code(HELP_EXIT_CODE, result, [MAIN_PROG_NAME, *args]) assert cmd_usage in result.output @pytest.mark.parametrize( "help_option", ("-h", "--help"), ids=lambda opt: opt.lstrip("-") ) @pytest.mark.parametrize( "command", (main, changelog, generate_config, publish, version), ids=lambda cmd: cmd.name, ) @pytest.mark.usefixtures(repo_w_trunk_only_conventional_commits.__name__) def test_help_invalid_config( help_option: str, command: Command, run_cli: RunCliFn, update_pyproject_toml: UpdatePyprojectTomlFn, ): """ Test that the help message is displayed when the current directory is a git repository and there is an invalid configuration file available. 
Documented issue #840 """ # Update the configuration file to have an invalid value update_pyproject_toml("tool.semantic_release.remote.type", "invalidhvcs") # Generate some expected output that should be specific per command cmd_usage = str.join( " ", list( filter( None, [ "Usage:", MAIN_PROG_NAME, command.name if command.name != "main" else "", "[OPTIONS]", "" if command.name != main.name else "COMMAND [ARGS]...", ], ) ), ) # Create the arguments list for subcommands unless its main args = list( filter(None, [command.name if command.name != main.name else "", help_option]) ) # Run the command with the help option result = run_cli(args, invoke_kwargs={"prog_name": MAIN_PROG_NAME}) # Evaluate result assert_exit_code(HELP_EXIT_CODE, result, [MAIN_PROG_NAME, *args]) assert cmd_usage in result.output @pytest.mark.parametrize( "help_option", ("-h", "--help"), ids=lambda opt: opt.lstrip("-") ) @pytest.mark.parametrize( "command", (main, changelog, generate_config, publish, version), ids=lambda cmd: cmd.name, ) @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_trunk_only_conventional_commits.__name__)] ) def test_help_non_release_branch( help_option: str, command: Command, run_cli: RunCliFn, repo_result: BuiltRepoResult, ): """ Test that the help message is displayed even when the current branch is not a release branch. 
Documented issue #840 """ # Create & checkout a non-release branch non_release_branch = repo_result["repo"].create_head("feature-branch") non_release_branch.checkout() # Generate some expected output that should be specific per command cmd_usage = str.join( " ", list( filter( None, [ "Usage:", MAIN_PROG_NAME, command.name if command.name != "main" else "", "[OPTIONS]", "" if command.name != main.name else "COMMAND [ARGS]...", ], ) ), ) # Create the arguments list for subcommands unless its main args = list( filter(None, [command.name if command.name != main.name else "", help_option]) ) # Run the command with the help option result = run_cli(args, invoke_kwargs={"prog_name": MAIN_PROG_NAME}) # Evaluate result assert_exit_code(HELP_EXIT_CODE, result, [MAIN_PROG_NAME, *args]) assert cmd_usage in result.output python-semantic-release-10.4.1/tests/e2e/test_main.py000066400000000000000000000166331506116242600225230ustar00rootroot00000000000000from __future__ import annotations import json import subprocess from pathlib import Path from shutil import rmtree from textwrap import dedent from typing import TYPE_CHECKING import git import pytest from click.testing import CliRunner from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release import __version__ from tests.const import MAIN_PROG_NAME, SUCCESS_EXIT_CODE, VERSION_SUBCMD from tests.fixtures.repos import repo_w_no_tags_conventional_commits from tests.util import assert_exit_code, assert_successful_exit_code if TYPE_CHECKING: from tests.conftest import RunCliFn from tests.e2e.conftest import StripLoggingMessagesFn from tests.fixtures.example_project import ExProjectDir, UpdatePyprojectTomlFn from tests.fixtures.git_repo import BuiltRepoResult @pytest.mark.parametrize( "project_script_name", [ "python-semantic-release", "semantic-release", "psr", ], ) def test_entrypoint_scripts(project_script_name: str): # Setup command = str.join(" ", [project_script_name, "--version"]) expected_output = 
f"semantic-release, version {__version__}\n" # Act proc = subprocess.run( # noqa: S602, PLW1510 command, shell=True, text=True, capture_output=True ) # Evaluate assert SUCCESS_EXIT_CODE == proc.returncode # noqa: SIM300 assert expected_output == proc.stdout assert not proc.stderr def test_main_prints_version_and_exits(run_cli: RunCliFn): cli_cmd = [MAIN_PROG_NAME, "--version"] # Act result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) assert result.output == f"semantic-release, version {__version__}\n" def test_main_no_args_passes_w_help_text(): from semantic_release.cli.commands.main import main cli_cmd = [MAIN_PROG_NAME] result = CliRunner().invoke(main, prog_name=cli_cmd[0]) assert_successful_exit_code(result, cli_cmd) assert "Usage: " in result.output @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_no_tags_conventional_commits.__name__)], ) def test_not_a_release_branch_exit_code( repo_result: BuiltRepoResult, run_cli: RunCliFn ): # Run anything that doesn't trigger the help text repo_result["repo"].git.checkout("-b", "branch-does-not-exist") # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--no-commit"] result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_no_tags_conventional_commits.__name__)], ) def test_not_a_release_branch_exit_code_with_strict( repo_result: BuiltRepoResult, run_cli: RunCliFn, ): # Run anything that doesn't trigger the help text repo_result["repo"].git.checkout("-b", "branch-does-not-exist") # Act cli_cmd = [MAIN_PROG_NAME, "--strict", VERSION_SUBCMD, "--no-commit"] result = run_cli(cli_cmd[1:]) # Evaluate assert_exit_code(2, result, cli_cmd) @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_no_tags_conventional_commits.__name__)], ) def test_not_a_release_branch_detached_head_exit_code( repo_result: BuiltRepoResult, run_cli: RunCliFn, ): expected_err_msg = ( "Detached HEAD state cannot 
match any release groups; no release will be made" ) # cause repo to be in detached head state without file changes repo_result["repo"].git.checkout("HEAD", "--detach") # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--no-commit"] result = run_cli(cli_cmd[1:]) # detached head states should throw an error as release branches cannot be determined assert_exit_code(1, result, cli_cmd) assert expected_err_msg in result.stderr @pytest.fixture def toml_file_with_no_configuration_for_psr(tmp_path: Path) -> Path: path = tmp_path / "config.toml" path.write_text( dedent( r""" [project] name = "foo" version = "1.2.0" """ ) ) return path @pytest.fixture def json_file_with_no_configuration_for_psr(tmp_path: Path) -> Path: path = tmp_path / "config.json" path.write_text(json.dumps({"foo": [1, 2, 3]})) return path @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_default_config_is_used_when_none_in_toml_config_file( run_cli: RunCliFn, toml_file_with_no_configuration_for_psr: Path, ): cli_cmd = [ MAIN_PROG_NAME, "--noop", "--config", str(toml_file_with_no_configuration_for_psr), VERSION_SUBCMD, ] # Act result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_default_config_is_used_when_none_in_json_config_file( run_cli: RunCliFn, json_file_with_no_configuration_for_psr: Path, ): cli_cmd = [ MAIN_PROG_NAME, "--noop", "--config", str(json_file_with_no_configuration_for_psr), VERSION_SUBCMD, ] # Act result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_errors_when_config_file_does_not_exist_and_passed_explicitly( run_cli: RunCliFn, ): cli_cmd = [ MAIN_PROG_NAME, "--noop", "--config", "somenonexistantfile.123.txt", VERSION_SUBCMD, ] # Act result = run_cli(cli_cmd[1:]) # Evaluate assert_exit_code(2, result, cli_cmd) assert "does not 
exist" in result.stderr @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_errors_when_config_file_invalid_configuration( run_cli: RunCliFn, update_pyproject_toml: UpdatePyprojectTomlFn, strip_logging_messages: StripLoggingMessagesFn, pyproject_toml_file: Path, ): # Setup update_pyproject_toml("tool.semantic_release.remote.type", "invalidType") cli_cmd = [MAIN_PROG_NAME, "--config", str(pyproject_toml_file), VERSION_SUBCMD] # Act result = run_cli(cli_cmd[1:]) # preprocess results stderr_lines = strip_logging_messages(result.stderr).splitlines() # Evaluate assert_exit_code(1, result, cli_cmd) assert "1 validation error for RawConfig" in stderr_lines[0] assert "remote.type" in stderr_lines[1] def test_uses_default_config_when_no_config_file_found( run_cli: RunCliFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ): # We have to initialise an empty git repository, as the example projects # all have pyproject.toml configs which would be used by default with git.Repo.init(example_project_dir) as repo: rmtree(str(Path(repo.git_dir, "hooks"))) repo.git.branch("-M", "main") with repo.config_writer("repository") as config: config.set_value("user", "name", "semantic release testing") config.set_value("user", "email", "not_a_real@email.com") config.set_value("commit", "gpgsign", False) config.set_value("tag", "gpgsign", False) repo.create_remote(name="origin", url="foo@barvcs.com:user/repo.git") repo.git.commit("-m", "feat: initial commit", "--allow-empty") cli_cmd = [MAIN_PROG_NAME, "--noop", VERSION_SUBCMD] # Act result = run_cli(cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) python-semantic-release-10.4.1/tests/fixtures/000077500000000000000000000000001506116242600213535ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/fixtures/__init__.py000066400000000000000000000003541506116242600234660ustar00rootroot00000000000000from tests.fixtures.commit_parsers import * from 
tests.fixtures.example_project import * from tests.fixtures.git_repo import * from tests.fixtures.monorepos import * from tests.fixtures.repos import * from tests.fixtures.scipy import * python-semantic-release-10.4.1/tests/fixtures/commit_parsers.py000066400000000000000000000036611506116242600247620ustar00rootroot00000000000000import pytest from semantic_release.commit_parser import ( ConventionalCommitParser, ConventionalCommitParserOptions, EmojiCommitParser, EmojiParserOptions, ) from tests.const import ( CONVENTIONAL_COMMITS_CHORE, CONVENTIONAL_COMMITS_MAJOR, CONVENTIONAL_COMMITS_MINOR, CONVENTIONAL_COMMITS_PATCH, EMOJI_COMMITS_CHORE, EMOJI_COMMITS_MAJOR, EMOJI_COMMITS_MINOR, EMOJI_COMMITS_PATCH, ) # Note scipy defined in ./scipy.py as already used there @pytest.fixture(scope="session") def default_conventional_parser() -> ConventionalCommitParser: return ConventionalCommitParser() @pytest.fixture(scope="session") def default_conventional_parser_options( default_conventional_parser: ConventionalCommitParser, ) -> ConventionalCommitParserOptions: return default_conventional_parser.get_default_options() @pytest.fixture(scope="session") def default_emoji_parser() -> EmojiCommitParser: return EmojiCommitParser() @pytest.fixture(scope="session") def default_emoji_parser_options( default_emoji_parser: EmojiCommitParser, ) -> EmojiParserOptions: return default_emoji_parser.get_default_options() @pytest.fixture(scope="session") def conventional_major_commits(): return CONVENTIONAL_COMMITS_MAJOR @pytest.fixture(scope="session") def conventional_minor_commits(): return CONVENTIONAL_COMMITS_MINOR @pytest.fixture(scope="session") def conventional_patch_commits(): return CONVENTIONAL_COMMITS_PATCH @pytest.fixture(scope="session") def conventional_chore_commits(): return CONVENTIONAL_COMMITS_CHORE @pytest.fixture(scope="session") def emoji_major_commits(): return EMOJI_COMMITS_MAJOR @pytest.fixture(scope="session") def emoji_minor_commits(): return EMOJI_COMMITS_MINOR 
@pytest.fixture(scope="session") def emoji_patch_commits(): return EMOJI_COMMITS_PATCH @pytest.fixture(scope="session") def emoji_chore_commits(): return EMOJI_COMMITS_CHORE python-semantic-release-10.4.1/tests/fixtures/example_project.py000066400000000000000000000610211506116242600251060ustar00rootroot00000000000000from __future__ import annotations import os from pathlib import Path from textwrap import dedent from typing import TYPE_CHECKING, Generator, cast from unittest import mock import pytest import tomlkit # NOTE: use backport with newer API from importlib_resources import files import semantic_release from semantic_release.cli.config import ( GlobalCommandLineOptions, RawConfig, RuntimeContext, ) from semantic_release.cli.util import load_raw_config_file from semantic_release.commit_parser import ( ConventionalCommitParser, EmojiCommitParser, ScipyCommitParser, ) from semantic_release.commit_parser.conventional.parser_monorepo import ( ConventionalCommitMonorepoParser, ) from semantic_release.hvcs import Bitbucket, Gitea, Github, Gitlab import tests.conftest import tests.const import tests.util from tests.const import ( EXAMPLE_CHANGELOG_MD_CONTENT, EXAMPLE_CHANGELOG_RST_CONTENT, EXAMPLE_PROJECT_NAME, EXAMPLE_PROJECT_VERSION, EXAMPLE_PYPROJECT_TOML_CONTENT, EXAMPLE_RELEASE_NOTES_TEMPLATE, EXAMPLE_SETUP_CFG_CONTENT, EXAMPLE_SETUP_PY_CONTENT, ) from tests.util import copy_dir_tree, temporary_working_directory if TYPE_CHECKING: from typing import Any, Protocol, Sequence from tomlkit.container import Container as TOMLContainer from semantic_release.commit_parser import CommitParser from semantic_release.commit_parser._base import ParserOptions from semantic_release.commit_parser.token import ParseResult from semantic_release.hvcs import HvcsBase from semantic_release.version.version import Version from tests.conftest import ( BuildRepoOrCopyCacheFn, GetMd5ForSetOfFilesFn, ) from tests.fixtures.git_repo import RepoActions ExProjectDir = Path class 
GetWheelFileFn(Protocol): def __call__(self, version_str: str, pkg_name: str = ...) -> Path: ... class SetFlagFn(Protocol): def __call__(self, flag: bool, toml_file: Path | str = ...) -> None: ... class UpdatePyprojectTomlFn(Protocol): def __call__( self, setting: str, value: Any, toml_file: Path | str = ... ) -> None: ... class UseCustomParserFn(Protocol): def __call__( self, module_import_str: str, toml_file: Path | str = ... ) -> None: ... class UseHvcsFn(Protocol): def __call__( self, domain: str | None = None, toml_file: Path | str = ... ) -> type[HvcsBase]: ... class UseParserFn(Protocol): def __call__( self, toml_file: Path | str = ..., monorepo: bool = ... ) -> type[CommitParser[ParseResult, ParserOptions]]: ... class UseReleaseNotesTemplateFn(Protocol): def __call__(self, toml_file: Path | str = ...) -> None: ... class UpdateVersionPyFileFn(Protocol): def __call__( self, version: Version | str, version_file: Path | str = ... ) -> None: ... class GetHvcsFn(Protocol): def __call__( self, hvcs_client_name: str, origin_url: str = ..., hvcs_domain: str | None = None, ) -> Github | Gitlab | Gitea | Bitbucket: ... class ReadConfigFileFn(Protocol): """Read the raw config file from `config_path`.""" def __call__(self, file: Path | str = ...) -> RawConfig: ... class LoadRuntimeContextFn(Protocol): """Load the runtime context from the config file.""" def __call__( self, cli_opts: GlobalCommandLineOptions | None = None ) -> RuntimeContext: ... class GetParserFromConfigFileFn(Protocol): """Get the commit parser from the config file.""" def __call__( self, file: Path | str = ... ) -> CommitParser[ParseResult, ParserOptions]: ... class GetExpectedVersionPyFileContentFn(Protocol): def __call__(self, version: Version | str) -> str: ... 
@pytest.fixture(scope="session") def deps_files_4_example_project() -> list[Path]: return [ # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_example_project( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_example_project: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files(deps_files_4_example_project) @pytest.fixture(scope="session") def cached_example_project( build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, version_py_file: Path, pyproject_toml_file: Path, setup_cfg_file: Path, setup_py_file: Path, changelog_md_file: Path, changelog_rst_file: Path, build_spec_hash_4_example_project: str, update_version_py_file: UpdateVersionPyFileFn, ) -> Path: """ Initializes the example project. DO NOT USE DIRECTLY Use the `init_example_project` fixture instead. 
""" def _build_project(cached_project_path: Path) -> Sequence[RepoActions]: # purposefully a relative path example_dir = version_py_file.parent gitignore_contents = dedent( f""" *.pyc /src/**/{version_py_file.name} """ ).lstrip() init_py_contents = dedent( ''' """ An example package with a very informative docstring """ from ._version import __version__ def hello_world() -> None: print("Hello World") ''' ).lstrip() with temporary_working_directory(cached_project_path): update_version_py_file(EXAMPLE_PROJECT_VERSION) file_2_contents: list[tuple[str | Path, str]] = [ (example_dir / "__init__.py", init_py_contents), (".gitignore", gitignore_contents), (pyproject_toml_file, EXAMPLE_PYPROJECT_TOML_CONTENT), (setup_cfg_file, EXAMPLE_SETUP_CFG_CONTENT), (setup_py_file, EXAMPLE_SETUP_PY_CONTENT), (changelog_md_file, EXAMPLE_CHANGELOG_MD_CONTENT), (changelog_rst_file, EXAMPLE_CHANGELOG_RST_CONTENT), ] for file, contents in file_2_contents: abs_filepath = cached_project_path.joinpath(file).resolve() # make sure the parent directory exists abs_filepath.parent.mkdir(parents=True, exist_ok=True) # write file contents abs_filepath.write_text(contents) # This is a special build, we don't expose the Repo Actions to the caller return [] # End of _build_project() return build_repo_or_copy_cache( repo_name=f"project_{EXAMPLE_PROJECT_NAME}", build_spec_hash=build_spec_hash_4_example_project, build_repo_func=_build_project, ) @pytest.fixture def init_example_project( example_project_dir: ExProjectDir, cached_example_project: Path, change_to_ex_proj_dir: None, ) -> None: """This fixture initializes the example project in the current test's project directory.""" if not cached_example_project.exists(): raise RuntimeError( f"Unable to find cached project files for {EXAMPLE_PROJECT_NAME}" ) # Copy the cached project files into the current test's project directory copy_dir_tree(cached_example_project, example_project_dir) @pytest.fixture def example_project_with_release_notes_template( 
init_example_project: None, use_release_notes_template: UseReleaseNotesTemplateFn, ) -> None: use_release_notes_template() @pytest.fixture(scope="session") def version_py_file() -> Path: return Path("src", EXAMPLE_PROJECT_NAME, "_version.py") @pytest.fixture(scope="session") def pyproject_toml_file() -> Path: return Path("pyproject.toml") @pytest.fixture(scope="session") def setup_cfg_file() -> Path: return Path("setup.cfg") @pytest.fixture(scope="session") def setup_py_file() -> Path: return Path("setup.py") @pytest.fixture(scope="session") def dist_dir() -> Path: return Path("dist") @pytest.fixture(scope="session") def changelog_md_file() -> Path: return Path("CHANGELOG.md") @pytest.fixture(scope="session") def changelog_rst_file() -> Path: return Path("CHANGELOG.rst") @pytest.fixture(scope="session") def changelog_template_dir() -> Path: return Path("templates") @pytest.fixture(scope="session") def default_md_changelog_insertion_flag() -> str: return "" @pytest.fixture(scope="session") def default_rst_changelog_insertion_flag() -> str: return f"..{os.linesep} version list" @pytest.fixture(scope="session") def default_changelog_md_template() -> Path: """Retrieve the semantic-release default changelog template file""" return Path( str( files(semantic_release.__name__).joinpath( Path("data", "templates", "conventional", "md", "CHANGELOG.md.j2") ) ) ).resolve() @pytest.fixture(scope="session") def default_changelog_rst_template() -> Path: """Retrieve the semantic-release default changelog template file""" return Path( str( files(semantic_release.__name__).joinpath( Path("data", "templates", "conventional", "rst", "CHANGELOG.rst.j2") ) ) ).resolve() @pytest.fixture(scope="session") def get_wheel_file(dist_dir: Path) -> GetWheelFileFn: def _get_wheel_file( version_str: str, pkg_name: str = EXAMPLE_PROJECT_NAME, ) -> Path: return dist_dir.joinpath( f"{pkg_name.replace('-', '_')}-{version_str}-py3-none-any.whl" ) return _get_wheel_file @pytest.fixture(scope="session") 
def read_config_file(pyproject_toml_file: Path) -> ReadConfigFileFn: def _read_config_file(file: Path | str = pyproject_toml_file) -> RawConfig: config_text = load_raw_config_file(file) return RawConfig.model_validate(config_text) return _read_config_file @pytest.fixture(scope="session") def load_runtime_context( read_config_file: ReadConfigFileFn, pyproject_toml_file: Path, ) -> LoadRuntimeContextFn: def _load_runtime_context( cli_opts: GlobalCommandLineOptions | None = None, ) -> RuntimeContext: opts = cli_opts or GlobalCommandLineOptions( config_file=str(pyproject_toml_file), ) raw_config = read_config_file(opts.config_file) return RuntimeContext.from_raw_config(raw_config, opts) return _load_runtime_context @pytest.fixture(scope="session") def get_parser_from_config_file( pyproject_toml_file: Path, load_runtime_context: LoadRuntimeContextFn, ) -> GetParserFromConfigFileFn: def _get_parser_from_config( file: Path | str = pyproject_toml_file, ) -> CommitParser[ParseResult, ParserOptions]: return load_runtime_context( cli_opts=GlobalCommandLineOptions(config_file=str(Path(file))) ).commit_parser return _get_parser_from_config @pytest.fixture def example_project_dir(tmp_path: Path) -> ExProjectDir: return tmp_path.resolve() @pytest.fixture def change_to_ex_proj_dir( example_project_dir: ExProjectDir, ) -> Generator[None, None, None]: cwd = os.getcwd() tgt_dir = str(example_project_dir.resolve()) if cwd == tgt_dir: return os.chdir(tgt_dir) try: yield finally: os.chdir(cwd) @pytest.fixture def use_release_notes_template( example_project_template_dir: Path, changelog_template_dir: Path, update_pyproject_toml: UpdatePyprojectTomlFn, pyproject_toml_file: Path, ) -> UseReleaseNotesTemplateFn: def _use_release_notes_template( toml_file: Path | str = pyproject_toml_file, ) -> None: update_pyproject_toml( "tool.semantic_release.changelog.template_dir", str(changelog_template_dir), toml_file=toml_file, ) example_project_template_dir.mkdir(parents=True, exist_ok=True) 
release_notes_j2 = example_project_template_dir / ".release_notes.md.j2" release_notes_j2.write_text(EXAMPLE_RELEASE_NOTES_TEMPLATE) return _use_release_notes_template @pytest.fixture def example_pyproject_toml( example_project_dir: ExProjectDir, pyproject_toml_file: Path, ) -> Path: return example_project_dir / pyproject_toml_file @pytest.fixture def example_setup_cfg( example_project_dir: ExProjectDir, setup_cfg_file: Path, ) -> Path: return example_project_dir / setup_cfg_file @pytest.fixture def example_setup_py( example_project_dir: ExProjectDir, setup_py_file: Path, ) -> Path: return example_project_dir / setup_py_file @pytest.fixture def example_dist_dir( example_project_dir: ExProjectDir, dist_dir: Path, ) -> Path: return example_project_dir / dist_dir @pytest.fixture def example_project_wheel_file( example_dist_dir: Path, get_wheel_file: GetWheelFileFn, ) -> Path: return example_dist_dir / get_wheel_file(EXAMPLE_PROJECT_VERSION) # Note this is just the path and the content may change @pytest.fixture def example_changelog_md( example_project_dir: ExProjectDir, changelog_md_file: Path, ) -> Path: return example_project_dir / changelog_md_file # Note this is just the path and the content may change @pytest.fixture def example_changelog_rst( example_project_dir: ExProjectDir, changelog_rst_file: Path, ) -> Path: return example_project_dir / changelog_rst_file @pytest.fixture def example_project_template_dir( example_project_dir: ExProjectDir, changelog_template_dir: Path, ) -> Path: return example_project_dir / changelog_template_dir @pytest.fixture(scope="session") def get_expected_version_py_file_content() -> GetExpectedVersionPyFileContentFn: def _get_expected_version_py_file_content(version: Version | str) -> str: return dedent( f"""\ __version__ = "{version}" """ ) return _get_expected_version_py_file_content @pytest.fixture(scope="session") def update_version_py_file( version_py_file: Path, get_expected_version_py_file_content: 
GetExpectedVersionPyFileContentFn, ) -> UpdateVersionPyFileFn: """ Updates the specified file with the expected version string content :param version: The version to set in the file :type version: Version | str :param version_file: The file to update :type version_file: Path | str """ def _update_version_py_file( version: Version | str, version_file: Path | str = version_py_file ) -> None: cwd_version_py = Path(version_file).resolve() cwd_version_py.parent.mkdir(parents=True, exist_ok=True) cwd_version_py.write_text(get_expected_version_py_file_content(version)) return _update_version_py_file @pytest.fixture(scope="session") def update_pyproject_toml(pyproject_toml_file: Path) -> UpdatePyprojectTomlFn: """Update the pyproject.toml file with the given content.""" def _update_pyproject_toml( setting: str, value: Any, toml_file: Path | str = pyproject_toml_file ) -> None: cwd_pyproject_toml = Path(toml_file).resolve() with open(cwd_pyproject_toml) as rfd: pyproject_toml = tomlkit.load(rfd) new_setting = {} parts = setting.split(".") new_setting_key = parts.pop(-1) new_setting[new_setting_key] = value pointer: TOMLContainer = pyproject_toml for part in parts: if (next_pointer := pointer.get(part, None)) is None: next_pointer = tomlkit.table() pointer.add(part, next_pointer) pointer = cast("TOMLContainer", next_pointer) if value is None: pointer.pop(new_setting_key) else: pointer.update(new_setting) with open(cwd_pyproject_toml, "w") as wfd: tomlkit.dump(pyproject_toml, wfd) return _update_pyproject_toml @pytest.fixture(scope="session") def pyproject_toml_config_option_parser() -> str: return f"tool.{semantic_release.__name__}.commit_parser" @pytest.fixture(scope="session") def pyproject_toml_config_option_remote_type() -> str: return f"tool.{semantic_release.__name__}.remote.type" @pytest.fixture(scope="session") def pyproject_toml_config_option_remote_domain() -> str: return f"tool.{semantic_release.__name__}.remote.domain" @pytest.fixture(scope="session") def 
set_major_on_zero( pyproject_toml_file: Path, update_pyproject_toml: UpdatePyprojectTomlFn ) -> SetFlagFn: """Turn on/off the major_on_zero setting.""" def _set_major_on_zero( flag: bool, toml_file: Path | str = pyproject_toml_file ) -> None: update_pyproject_toml("tool.semantic_release.major_on_zero", flag, toml_file) return _set_major_on_zero @pytest.fixture(scope="session") def set_allow_zero_version( pyproject_toml_file: Path, update_pyproject_toml: UpdatePyprojectTomlFn ) -> SetFlagFn: """Turn on/off the allow_zero_version setting.""" def _set_allow_zero_version( flag: bool, toml_file: Path | str = pyproject_toml_file ) -> None: update_pyproject_toml( "tool.semantic_release.allow_zero_version", flag, toml_file ) return _set_allow_zero_version @pytest.fixture(scope="session") def use_conventional_parser( pyproject_toml_file: Path, update_pyproject_toml: UpdatePyprojectTomlFn, pyproject_toml_config_option_parser: str, ) -> UseParserFn: """Modify the configuration file to use the Conventional parser.""" def _use_conventional_parser( toml_file: Path | str = pyproject_toml_file, monorepo: bool = False ) -> type[CommitParser[ParseResult, ParserOptions]]: update_pyproject_toml( pyproject_toml_config_option_parser, f"conventional{'-monorepo' if monorepo else ''}", toml_file=toml_file, ) return cast( "type[CommitParser[ParseResult, ParserOptions]]", ConventionalCommitMonorepoParser if monorepo else ConventionalCommitParser, ) return _use_conventional_parser @pytest.fixture(scope="session") def use_emoji_parser( pyproject_toml_file: Path, update_pyproject_toml: UpdatePyprojectTomlFn, pyproject_toml_config_option_parser: str, ) -> UseParserFn: """Modify the configuration file to use the Emoji parser.""" def _use_emoji_parser( toml_file: Path | str = pyproject_toml_file, monorepo: bool = False ) -> type[CommitParser[ParseResult, ParserOptions]]: if monorepo: raise ValueError( "The Emoji parser does not support monorepo mode. " "Use the conventional parser instead." 
) update_pyproject_toml( pyproject_toml_config_option_parser, "emoji", toml_file=toml_file ) return cast("type[CommitParser[ParseResult, ParserOptions]]", EmojiCommitParser) return _use_emoji_parser @pytest.fixture(scope="session") def use_scipy_parser( pyproject_toml_file: Path, update_pyproject_toml: UpdatePyprojectTomlFn, pyproject_toml_config_option_parser: str, ) -> UseParserFn: """Modify the configuration file to use the Scipy parser.""" def _use_scipy_parser( toml_file: Path | str = pyproject_toml_file, monorepo: bool = False ) -> type[CommitParser[ParseResult, ParserOptions]]: if monorepo: raise ValueError( "The Scipy parser does not support monorepo mode. " "Use the conventional parser instead." ) update_pyproject_toml( pyproject_toml_config_option_parser, "scipy", toml_file=toml_file ) return cast("type[CommitParser[ParseResult, ParserOptions]]", ScipyCommitParser) return _use_scipy_parser @pytest.fixture(scope="session") def use_custom_parser( pyproject_toml_file: Path, update_pyproject_toml: UpdatePyprojectTomlFn, pyproject_toml_config_option_parser: str, ) -> UseCustomParserFn: """Modify the configuration file to use a user defined string parser.""" def _use_custom_parser( module_import_str: str, toml_file: Path | str = pyproject_toml_file ) -> None: update_pyproject_toml( pyproject_toml_config_option_parser, module_import_str, toml_file=toml_file ) return _use_custom_parser @pytest.fixture(scope="session") def get_hvcs(example_git_https_url: str) -> GetHvcsFn: hvcs_clients: dict[str, type[HvcsBase]] = { "github": Github, "gitlab": Gitlab, "gitea": Gitea, "bitbucket": Bitbucket, } def _get_hvcs( hvcs_client_name: str, origin_url: str = example_git_https_url, hvcs_domain: str | None = None, ) -> Github | Gitlab | Gitea | Bitbucket: if (hvcs_class := hvcs_clients.get(hvcs_client_name)) is None: raise ValueError(f"Unknown HVCS client name: {hvcs_client_name}") # Create HVCS Client instance with mock.patch.dict(os.environ, {}, clear=True): hvcs = 
hvcs_class(origin_url, hvcs_domain=hvcs_domain) assert hvcs.repo_name # Force the HVCS client to cache the repo name assert hvcs.owner # Force the HVCS client to cache the owner return cast("Github | Gitlab | Gitea | Bitbucket", hvcs) return _get_hvcs @pytest.fixture(scope="session") def use_github_hvcs( pyproject_toml_file: Path, update_pyproject_toml: UpdatePyprojectTomlFn, pyproject_toml_config_option_remote_type: str, pyproject_toml_config_option_remote_domain: str, ) -> UseHvcsFn: """Modify the configuration file to use GitHub as the HVCS.""" def _use_github_hvcs( domain: str | None = None, toml_file: Path | str = pyproject_toml_file ) -> type[HvcsBase]: update_pyproject_toml( pyproject_toml_config_option_remote_type, Github.__name__.lower(), toml_file=toml_file, ) if domain is not None: update_pyproject_toml( pyproject_toml_config_option_remote_domain, domain, toml_file=toml_file ) return Github return _use_github_hvcs @pytest.fixture(scope="session") def use_gitlab_hvcs( pyproject_toml_file: Path, update_pyproject_toml: UpdatePyprojectTomlFn, pyproject_toml_config_option_remote_type: str, pyproject_toml_config_option_remote_domain: str, ) -> UseHvcsFn: """Modify the configuration file to use GitLab as the HVCS.""" def _use_gitlab_hvcs( domain: str | None = None, toml_file: Path | str = pyproject_toml_file ) -> type[HvcsBase]: update_pyproject_toml( pyproject_toml_config_option_remote_type, Gitlab.__name__.lower(), toml_file=toml_file, ) if domain is not None: update_pyproject_toml( pyproject_toml_config_option_remote_domain, domain, toml_file=toml_file ) return Gitlab return _use_gitlab_hvcs @pytest.fixture(scope="session") def use_gitea_hvcs( pyproject_toml_file: Path, update_pyproject_toml: UpdatePyprojectTomlFn, pyproject_toml_config_option_remote_type: str, pyproject_toml_config_option_remote_domain: str, ) -> UseHvcsFn: """Modify the configuration file to use Gitea as the HVCS.""" def _use_gitea_hvcs( domain: str | None = None, toml_file: Path | str = 
pyproject_toml_file ) -> type[HvcsBase]: update_pyproject_toml( pyproject_toml_config_option_remote_type, Gitea.__name__.lower(), toml_file=toml_file, ) if domain is not None: update_pyproject_toml( pyproject_toml_config_option_remote_domain, domain, toml_file=toml_file ) return Gitea return _use_gitea_hvcs @pytest.fixture(scope="session") def use_bitbucket_hvcs( pyproject_toml_file: Path, update_pyproject_toml: UpdatePyprojectTomlFn, pyproject_toml_config_option_remote_type: str, pyproject_toml_config_option_remote_domain: str, ) -> UseHvcsFn: """Modify the configuration file to use BitBucket as the HVCS.""" def _use_bitbucket_hvcs( domain: str | None = None, toml_file: Path | str = pyproject_toml_file ) -> type[HvcsBase]: update_pyproject_toml( pyproject_toml_config_option_remote_type, Bitbucket.__name__.lower(), toml_file=toml_file, ) if domain is not None: update_pyproject_toml( pyproject_toml_config_option_remote_domain, domain, toml_file=toml_file ) return Bitbucket return _use_bitbucket_hvcs python-semantic-release-10.4.1/tests/fixtures/git_repo.py000066400000000000000000002670131506116242600235460ustar00rootroot00000000000000from __future__ import annotations import os import sys from copy import deepcopy from datetime import datetime, timedelta from functools import reduce from itertools import count from pathlib import Path from shutil import rmtree from textwrap import dedent from time import sleep from typing import TYPE_CHECKING, TypeVar, cast from unittest import mock import pytest from git import Actor, Repo from semantic_release.cli.config import ChangelogOutputFormat from semantic_release.hvcs.bitbucket import Bitbucket from semantic_release.hvcs.gitea import Gitea from semantic_release.hvcs.github import Github from semantic_release.hvcs.gitlab import Gitlab from semantic_release.version.version import Version import tests.conftest import tests.const import tests.util from tests.const import ( COMMIT_MESSAGE, DEFAULT_BRANCH_NAME, 
DEFAULT_MERGE_STRATEGY_OPTION, EXAMPLE_HVCS_DOMAIN, EXAMPLE_PROJECT_NAME, EXAMPLE_REPO_NAME, EXAMPLE_REPO_OWNER, NULL_HEX_SHA, RepoActionStep, ) from tests.util import ( add_text_to_file, copy_dir_tree, temporary_working_directory, ) if TYPE_CHECKING: from typing import ( Any, Dict, Generator, Generic, List, Literal, Protocol, Sequence, Set, Tuple, TypedDict, TypeVar, Union, ) from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.conventional import ( ConventionalCommitParser, ) from semantic_release.commit_parser.conventional.parser_monorepo import ( ConventionalCommitMonorepoParser, ) from semantic_release.commit_parser.emoji import EmojiCommitParser from semantic_release.commit_parser.scipy import ScipyCommitParser from semantic_release.commit_parser.token import ParsedMessageResult, ParseResult from tests.fixtures.example_project import ( GetHvcsFn, GetParserFromConfigFileFn, UpdateVersionPyFileFn, ) from tests.fixtures.monorepos.git_monorepo import BuildMonorepoFn try: # Python 3.8 and 3.9 compatibility from typing_extensions import TypeAlias except ImportError: from typing import TypeAlias # type: ignore[attr-defined, no-redef] from typing_extensions import NotRequired from semantic_release.hvcs import HvcsBase from tests.conftest import ( BuildRepoOrCopyCacheFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ( ExProjectDir, GetWheelFileFn, UpdatePyprojectTomlFn, UseCustomParserFn, UseHvcsFn, UseParserFn, ) CommitConvention = Literal["conventional", "emoji", "scipy"] VersionStr = str CommitMsg = str DatetimeISOStr = str ChangelogTypeHeading = str TomlSerializableTypes = Union[ Dict[Any, Any], Set[Any], List[Any], Tuple[Any, ...], int, float, bool, str ] class RepoVersionDef(TypedDict): """ A reduced common repo definition, that is specific to a type of commit conventions Used for builder functions that only need to know about a single commit convention type """ 
commits: list[CommitDef] class BaseAccumulatorVersionReduction(TypedDict): version_limit: Version repo_def: RepoDefinition class ChangelogTypeHeadingDef(TypedDict): section: ChangelogTypeHeading i_commits: list[int] """List of indexes values to match to the commits list in the RepoVersionDef""" class CommitDef(TypedDict): cid: str msg: CommitMsg type: str category: str desc: str brking_desc: str scope: str mr: str sha: str datetime: NotRequired[DatetimeISOStr] include_in_changelog: bool class BaseRepoVersionDef(TypedDict): """A Common Repo definition for a get_commits_repo_*() fixture with all commit convention types""" changelog_sections: dict[CommitConvention, list[ChangelogTypeHeadingDef]] commits: list[dict[CommitConvention, str]] class BuildRepoFn(Protocol): def __call__( self, dest_dir: Path | str, commit_type: CommitConvention = ..., hvcs_client_name: str = ..., hvcs_domain: str = ..., tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = True, # Default as of v10 package_name: str = ..., monorepo: bool = False, ) -> tuple[Path, HvcsBase]: ... class CommitNReturnChangelogEntryFn(Protocol): def __call__(self, git_repo: Repo, commit_def: CommitDef) -> CommitDef: ... class SimulateChangeCommitsNReturnChangelogEntryFn(Protocol): def __call__( self, git_repo: Repo, commit_msgs: Sequence[CommitDef] ) -> Sequence[CommitDef]: ... class CreateReleaseFn(Protocol): def __call__( self, git_repo: Repo, version: str, tag_format: str = ..., timestamp: DatetimeISOStr | None = None, version_py_file: Path | str = ..., commit_message_format: str = ..., ) -> None: ... class ExProjectGitRepoFn(Protocol): def __call__(self) -> Repo: ... class ExtractRepoDefinitionFn(Protocol): def __call__( self, base_repo_def: dict[str, BaseRepoVersionDef], commit_type: CommitConvention, ) -> RepoDefinition: ... 
T_contra = TypeVar("T_contra", contravariant=True) class GetCommitDefFn(Protocol[T_contra]): def __call__(self, msg: str, parser: T_contra) -> CommitDef: ... class GetVersionStringsFn(Protocol): def __call__(self) -> list[VersionStr]: ... class GetCommitsFromRepoBuildDefFn(Protocol): def __call__( self, build_definition: Sequence[RepoActions], filter_4_changelog: bool = False, ignore_merge_commits: bool = False, ) -> RepoDefinition: ... RepoDefinition: TypeAlias = dict[VersionStr, RepoVersionDef] # type: ignore[misc] # mypy is thoroughly confused """ A Type alias to define a repositories versions, commits, and changelog sections for a specific commit convention """ class SimulateDefaultChangelogCreationFn(Protocol): def __call__( self, repo_definition: RepoDefinition, hvcs: Github | Gitlab | Gitea | Bitbucket, dest_file: Path | None = None, max_version: Version | Literal["Unreleased"] | None = None, output_format: ChangelogOutputFormat = ChangelogOutputFormat.MARKDOWN, mask_initial_release: bool = True, # Default as of v10 ) -> str: ... class FormatGitSquashCommitMsgFn(Protocol): def __call__( self, squashed_commits: list[CommitDef], ) -> str: ... class FormatGitHubSquashCommitMsgFn(Protocol): def __call__( self, pr_title: str, pr_number: int, squashed_commits: list[CommitDef | str], ) -> str: ... class FormatBitBucketSquashCommitMsgFn(Protocol): def __call__( self, branch_name: str, pr_title: str, pr_number: int, squashed_commits: list[CommitDef], ) -> str: ... class FormatGitMergeCommitMsgFn(Protocol): def __call__(self, branch_name: str, tgt_branch_name: str) -> str: ... class FormatGitHubMergeCommitMsgFn(Protocol): def __call__(self, pr_number: int, branch_name: str) -> str: ... class FormatGitLabMergeCommitMsgFn(Protocol): def __call__( self, mr_title: str, mr_number: int, source_branch: str, target_branch: str, closed_issues: list[str], ) -> str: ... 
class CreateMergeCommitFn(Protocol): def __call__( self, git_repo: Repo, branch_name: str, commit_def: CommitDef, fast_forward: bool = True, strategy_option: str = DEFAULT_MERGE_STRATEGY_OPTION, ) -> CommitDef: ... class CreateSquashMergeCommitFn(Protocol): def __call__( self, git_repo: Repo, branch_name: str, commit_def: CommitDef, strategy_option: str = DEFAULT_MERGE_STRATEGY_OPTION, ) -> CommitDef: ... class CommitSpec(TypedDict): cid: str conventional: str emoji: str scipy: str datetime: NotRequired[DatetimeISOStr] include_in_changelog: NotRequired[bool] class DetailsBase(TypedDict): pre_actions: NotRequired[Sequence[RepoActions]] post_actions: NotRequired[Sequence[RepoActions]] class RepoActionConfigure(TypedDict): action: Literal[RepoActionStep.CONFIGURE] details: RepoActionConfigureDetails class RepoActionConfigureDetails(DetailsBase): commit_type: CommitConvention hvcs_client_name: str hvcs_domain: str tag_format_str: str | None mask_initial_release: bool extra_configs: dict[str, TomlSerializableTypes] class RepoActionConfigureMonorepo(TypedDict): action: Literal[RepoActionStep.CONFIGURE_MONOREPO] details: RepoActionConfigureMonorepoDetails class RepoActionConfigureMonorepoDetails(DetailsBase): package_dir: Path | str package_name: str tag_format_str: str | None mask_initial_release: bool extra_configs: dict[str, TomlSerializableTypes] class RepoActionCreateMonorepo(TypedDict): action: Literal[RepoActionStep.CREATE_MONOREPO] details: RepoActionCreateMonorepoDetails class RepoActionCreateMonorepoDetails(DetailsBase): commit_type: CommitConvention hvcs_client_name: str hvcs_domain: str origin_url: NotRequired[str] class RepoActionChangeDirectory(TypedDict): action: Literal[RepoActionStep.CHANGE_DIRECTORY] details: RepoActionChangeDirectoryDetails class RepoActionChangeDirectoryDetails(DetailsBase): directory: Path | str class RepoActionMakeCommits(TypedDict): action: Literal[RepoActionStep.MAKE_COMMITS] details: RepoActionMakeCommitsDetails class 
RepoActionMakeCommitsDetails(DetailsBase): commits: Sequence[CommitDef] class RepoActionRelease(TypedDict): action: Literal[RepoActionStep.RELEASE] details: RepoActionReleaseDetails class RepoActionReleaseDetails(DetailsBase): commit_message_format: NotRequired[str] datetime: DatetimeISOStr tag_format: NotRequired[str] version: str version_py_file: NotRequired[Path | str] class RepoActionGitCheckout(TypedDict): action: Literal[RepoActionStep.GIT_CHECKOUT] details: RepoActionGitCheckoutDetails class RepoActionGitCheckoutDetails(DetailsBase): create_branch: NotRequired[RepoActionGitCheckoutCreateBranch] branch: NotRequired[str] class RepoActionGitCheckoutCreateBranch(TypedDict): name: str start_branch: str class RepoActionGitSquash(TypedDict): action: Literal[RepoActionStep.GIT_SQUASH] details: RepoActionGitSquashDetails class RepoActionGitSquashDetails(DetailsBase): branch: str strategy_option: str commit_def: CommitDef config_file: NotRequired[Path | str] class RepoActionGitMergeDetails(DetailsBase): branch_name: str commit_def: CommitDef fast_forward: Literal[False] strategy_option: NotRequired[str] class RepoActionGitFFMergeDetails(DetailsBase): branch_name: str fast_forward: Literal[True] MergeDetails = TypeVar( "MergeDetails", bound=Union[RepoActionGitMergeDetails, RepoActionGitFFMergeDetails], ) class RepoActionGitMerge(Generic[MergeDetails], TypedDict): action: Literal[RepoActionStep.GIT_MERGE] details: MergeDetails class RepoActionWriteChangelogs(TypedDict): action: Literal[RepoActionStep.WRITE_CHANGELOGS] details: RepoActionWriteChangelogsDetails class RepoActionWriteChangelogsDetails(DetailsBase): new_version: Version | Literal["Unreleased"] max_version: NotRequired[Version | Literal["Unreleased"]] dest_files: Sequence[RepoActionWriteChangelogsDestFile] commit_ids: Sequence[str] class RepoActionWriteChangelogsDestFile(TypedDict): path: Path | str format: ChangelogOutputFormat mask_initial_release: bool class ConvertCommitSpecToCommitDefFn(Protocol): def 
__call__( self, commit_spec: CommitSpec, commit_type: CommitConvention, parser: CommitParser[ParseResult, ParserOptions], monorepo: bool = ..., ) -> CommitDef: ... class GetRepoDefinitionFn(Protocol): def __call__( self, commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = ..., ignore_merge_commits: bool = True, # Default as of v10 ) -> Sequence[RepoActions]: ... class BuildRepoFromDefinitionFn(Protocol): def __call__( self, dest_dir: Path | str, repo_construction_steps: Sequence[RepoActions], ) -> Sequence[RepoActions]: ... class BuiltRepoResult(TypedDict): definition: Sequence[RepoActions] repo: Repo class GetVersionsFromRepoBuildDefFn(Protocol): def __call__(self, repo_def: Sequence[RepoActions]) -> Sequence[Version]: ... class ConvertCommitSpecsToCommitDefsFn(Protocol): def __call__( self, commits: Sequence[CommitSpec], commit_type: CommitConvention, parser: CommitParser[ParseResult, ParserOptions], monorepo: bool = ..., ) -> Sequence[CommitDef]: ... class BuildSpecificRepoFn(Protocol): def __call__( self, repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: ... RepoActions: TypeAlias = Union[ RepoActionChangeDirectory, RepoActionConfigure, RepoActionConfigureMonorepo, RepoActionCreateMonorepo, RepoActionGitCheckout, RepoActionGitMerge[RepoActionGitMergeDetails], RepoActionGitMerge[RepoActionGitFFMergeDetails], RepoActionGitSquash, RepoActionMakeCommits, RepoActionRelease, RepoActionWriteChangelogs, ] class GetGitRepo4DirFn(Protocol): def __call__(self, directory: Path | str) -> Repo: ... class SplitRepoActionsByReleaseTagsFn(Protocol): def __call__( self, repo_definition: Sequence[RepoActions], ) -> dict[Version | Literal["Unreleased"] | None, list[RepoActions]]: ... 
class GetCfgValueFromDefFn(Protocol): def __call__( self, build_definition: Sequence[RepoActions], key: str ) -> Any: ... class SquashedCommitSupportedParser(Protocol): def unsquash_commit_message(self, message: str) -> list[str]: ... def parse_message(self, message: str) -> ParsedMessageResult | None: ... class SeparateSquashedCommitDefFn(Protocol): def __call__( self, squashed_commit_def: CommitDef, parser: SquashedCommitSupportedParser ) -> list[CommitDef]: ... class GenerateDefaultReleaseNotesFromDefFn(Protocol): def __call__( self, version_actions: Sequence[RepoActions], hvcs: Github | Gitlab | Gitea | Bitbucket, previous_version: Version | None = None, license_name: str = "", dest_file: Path | None = None, mask_initial_release: bool = True, # Default as of v10 ) -> str: ... class GetHvcsClientFromRepoDefFn(Protocol): def __call__( self, repo_def: Sequence[RepoActions], ) -> Github | Gitlab | Gitea | Bitbucket: ... @pytest.fixture(scope="session") def deps_files_4_example_git_project( deps_files_4_example_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_example_git_project( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_example_git_project: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files(deps_files_4_example_git_project) @pytest.fixture(scope="session") def cached_example_git_project( build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_example_git_project: str, cached_example_project: Path, example_git_https_url: str, commit_author: Actor, ) -> Path: """ Initializes an example project with git repo. DO NOT USE DIRECTLY. Use a `repo_*` fixture instead. 
This creates a default base repository, all settings can be changed later through from the example_project_git_repo fixture's return object and manual adjustment. """ def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: if not cached_example_project.exists(): raise RuntimeError("Unable to find cached project files") # make a copy of the example project as a base copy_dir_tree(cached_example_project, cached_repo_path) # initialize git repo (open and close) # NOTE: We don't want to hold the repo object open for the entire test session, # the implementation on Windows holds some file descriptors open until close is called. with Repo.init(cached_repo_path) as repo: rmtree(str(Path(repo.git_dir, "hooks"))) # Without this the global config may set it to "master", we want consistency repo.git.branch("-M", DEFAULT_BRANCH_NAME) with repo.config_writer("repository") as config: config.set_value("user", "name", commit_author.name) config.set_value("user", "email", commit_author.email) config.set_value("commit", "gpgsign", False) config.set_value("tag", "gpgsign", False) repo.create_remote(name="origin", url=example_git_https_url) # make sure all base files are in index to enable initial commit repo.index.add(("*", ".gitignore")) # This is a special build, we don't expose the Repo Actions to the caller return [] # End of _build_repo() return build_repo_or_copy_cache( repo_name=cached_example_git_project.__name__.split("_", maxsplit=1)[1], build_spec_hash=build_spec_hash_4_example_git_project, build_repo_func=_build_repo, ) @pytest.fixture(scope="session") def commit_author(): return Actor(name="semantic release testing", email="not_a_real@email.com") @pytest.fixture(scope="session") def default_tag_format_str() -> str: return "v{version}" @pytest.fixture(scope="session") def file_in_repo(): return "file.txt" @pytest.fixture(scope="session") def example_git_ssh_url(): return f"git@{EXAMPLE_HVCS_DOMAIN}:{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git" 
@pytest.fixture(scope="session") def example_git_https_url(): return f"https://{EXAMPLE_HVCS_DOMAIN}/{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git" @pytest.fixture(scope="session") def get_commit_def_of_conventional_commit() -> GetCommitDefFn[ConventionalCommitParser]: def _get_commit_def(msg: str, parser: ConventionalCommitParser) -> CommitDef: if not (parsed_result := parser.parse_message(msg)): return { "cid": "", "msg": msg, "type": "unknown", "category": "Unknown", "desc": msg, "brking_desc": "", "scope": "", "mr": "", "sha": NULL_HEX_SHA, "include_in_changelog": False, } return { "cid": "", "msg": msg, "type": parsed_result.type, "category": parsed_result.category, "desc": str.join("\n\n", parsed_result.descriptions), "brking_desc": str.join("\n\n", parsed_result.breaking_descriptions), "scope": parsed_result.scope, "mr": parsed_result.linked_merge_request, "sha": NULL_HEX_SHA, "include_in_changelog": True, } return _get_commit_def @pytest.fixture(scope="session") def get_commit_def_of_conventional_commit_monorepo() -> ( GetCommitDefFn[ConventionalCommitMonorepoParser] ): def _get_commit_def( msg: str, parser: ConventionalCommitMonorepoParser ) -> CommitDef: if not (parsed_result := parser.parse_message(msg)): return { "cid": "", "msg": msg, "type": "unknown", "category": "Unknown", "desc": msg, "brking_desc": "", "scope": "", "mr": "", "sha": NULL_HEX_SHA, "include_in_changelog": False, } return { "cid": "", "msg": msg, "type": parsed_result.type, "category": parsed_result.category, "desc": str.join("\n\n", parsed_result.descriptions), "brking_desc": str.join("\n\n", parsed_result.breaking_descriptions), "scope": parsed_result.scope, "mr": parsed_result.linked_merge_request, "sha": NULL_HEX_SHA, "include_in_changelog": True, } return _get_commit_def @pytest.fixture(scope="session") def get_commit_def_of_emoji_commit() -> GetCommitDefFn[EmojiCommitParser]: def _get_commit_def_of_emoji_commit( msg: str, parser: EmojiCommitParser ) -> CommitDef: if not 
(parsed_result := parser.parse_message(msg)): return { "cid": "", "msg": msg, "type": "unknown", "category": "Other", "desc": msg, "brking_desc": "", "scope": "", "mr": "", "sha": NULL_HEX_SHA, "include_in_changelog": False, } return { "cid": "", "msg": msg, "type": parsed_result.type, "category": parsed_result.category, "desc": str.join("\n\n", parsed_result.descriptions), "brking_desc": str.join("\n\n", parsed_result.breaking_descriptions), "scope": parsed_result.scope, "mr": parsed_result.linked_merge_request, "sha": NULL_HEX_SHA, "include_in_changelog": True, } return _get_commit_def_of_emoji_commit @pytest.fixture(scope="session") def get_commit_def_of_scipy_commit() -> GetCommitDefFn[ScipyCommitParser]: def _get_commit_def_of_scipy_commit( msg: str, parser: ScipyCommitParser ) -> CommitDef: if not (parsed_result := parser.parse_message(msg)): return { "cid": "", "msg": msg, "type": "unknown", "category": "Unknown", "desc": msg, "brking_desc": "", "scope": "", "mr": "", "sha": NULL_HEX_SHA, "include_in_changelog": False, } return { "cid": "", "msg": msg, "type": parsed_result.type, "category": parsed_result.category, "desc": str.join("\n\n", parsed_result.descriptions), "brking_desc": str.join("\n\n", parsed_result.breaking_descriptions), "scope": parsed_result.scope, "mr": parsed_result.linked_merge_request, "sha": NULL_HEX_SHA, "include_in_changelog": True, } return _get_commit_def_of_scipy_commit @pytest.fixture(scope="session") def format_merge_commit_msg_git() -> FormatGitMergeCommitMsgFn: def _format_merge_commit_msg_git(branch_name: str, tgt_branch_name: str) -> str: return f"Merge branch '{branch_name}' into '{tgt_branch_name}'" return _format_merge_commit_msg_git @pytest.fixture(scope="session") def format_merge_commit_msg_github() -> FormatGitHubMergeCommitMsgFn: def _format_merge_commit_msg_git(pr_number: int, branch_name: str) -> str: return f"Merge pull request #{pr_number} from '{branch_name}'" return _format_merge_commit_msg_git 
@pytest.fixture(scope="session")
def format_merge_commit_msg_gitlab() -> FormatGitLabMergeCommitMsgFn:
    """Fixture: format a GitLab default merge-request merge commit message."""

    def _format_merge_commit_msg(
        mr_title: str,
        mr_number: int,
        source_branch: str,
        target_branch: str,
        closed_issues: list[str],
    ) -> str:
        """REF: https://docs.gitlab.com/17.8/ee/user/project/merge_requests/commit_templates.html"""
        reference = f"{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}!{mr_number}"
        # "Closes A", "Closes A and B", or "Closes A, B and C" depending on count
        issue_statement = (
            ""
            if not closed_issues
            else str.join(
                " ",
                [
                    "Closes",
                    str.join(
                        " and ", [str.join(", ", closed_issues[:-1]), closed_issues[-1]]
                    )
                    if len(closed_issues) > 1
                    else closed_issues[0],
                ],
            )
        )
        # filter(None, ...) drops empty segments (e.g. no closed issues)
        return str.join(
            "\n\n",
            filter(
                None,
                [
                    f"Merge branch '{source_branch}' into '{target_branch}'",
                    f"{mr_title}",
                    f"{issue_statement}",
                    f"See merge request {reference}",
                ],
            ),
        )

    return _format_merge_commit_msg


@pytest.fixture(scope="session")
def format_squash_commit_msg_git(commit_author: Actor) -> FormatGitSquashCommitMsgFn:
    """Fixture: mimic the message body ``git merge --squash`` writes."""

    def _format_squash_commit_msg_git(
        squashed_commits: list[CommitDef],
    ) -> str:
        return (
            str.join(
                "\n\n",
                [
                    "Squashed commit of the following:",
                    *[
                        str.join(
                            "\n",
                            [
                                f"commit {commit['sha']}",
                                f"Author: {commit_author.name} <{commit_author.email}>",
                                # TODO: get date from CommitDef object
                                "Date: Day Mon DD HH:MM:SS YYYY +HHMM",
                                "",
                                *[f" {line}" for line in commit["msg"].split("\n")],
                            ],
                        )
                        for commit in squashed_commits
                    ],
                ],
            )
            + "\n"
        )

    return _format_squash_commit_msg_git


@pytest.fixture(scope="session")
def format_squash_commit_msg_github() -> FormatGitHubSquashCommitMsgFn:
    """Fixture: mimic a GitHub squash-merge commit message for a pull request."""

    def _format_squash_commit_msg_github(
        pr_title: str,
        pr_number: int,
        squashed_commits: list[CommitDef | str],
    ) -> str:
        # Accepts either raw strings or CommitDef dicts; dicts contribute
        # their "msg" field (empty messages are dropped by filter(None, ...))
        sq_commits: list[str] = (
            cast("list[str]", squashed_commits)
            if len(squashed_commits) > 1 and not isinstance(squashed_commits[0], dict)
            else list(
                filter(
                    None,
                    [
                        commit.get("msg", "") if isinstance(commit, dict) else commit
                        for commit in squashed_commits
                    ],
                )
            )
        )
        # GitHub appends " (#N)" to the first line of the PR title only
        pr_title_parts = pr_title.strip().split("\n\n", maxsplit=1)
        return (
            str.join(
                "\n\n",
                [
                    f"{pr_title_parts[0]} (#{pr_number})",
                    *pr_title_parts[1:],
                    *[f"* {commit_str}" for commit_str in sq_commits],
                ],
            )
            + "\n"
        )

    return _format_squash_commit_msg_github


@pytest.fixture(scope="session")
def format_squash_commit_msg_bitbucket() -> FormatBitBucketSquashCommitMsgFn:
    """Fixture: mimic a BitBucket squash-merge commit message for a pull request."""

    def _format_squash_commit_msg_bitbucket(
        branch_name: str,
        pr_title: str,
        pr_number: int,
        squashed_commits: list[CommitDef],
    ) -> str:
        # See #1085, for detail on BitBucket squash commit message format
        # NOTE(review): unlike the GitHub variant above, CommitDef items are
        # interpolated directly into the bullet lines (no "msg" extraction) —
        # presumably callers pass plain strings here; verify against callers.
        return (
            str.join(
                "\n\n",
                [
                    f"Merged in {branch_name} (pull request #{pr_number})",
                    f"{pr_title}",
                    *[f"* {commit_str}" for commit_str in squashed_commits],
                ],
            )
            + "\n"
        )

    return _format_squash_commit_msg_bitbucket


@pytest.fixture(scope="session")
def create_merge_commit(stable_now_date: GetStableDateNowFn) -> CreateMergeCommitFn:
    """Fixture: merge a branch into the current branch with a stable timestamp."""

    def _create_merge_commit(
        git_repo: Repo,
        branch_name: str,
        commit_def: CommitDef,
        fast_forward: bool = True,
        strategy_option: str = DEFAULT_MERGE_STRATEGY_OPTION,
    ) -> CommitDef:
        curr_dt = stable_now_date()
        # prefer an explicit timestamp from the commit definition when present
        commit_dt = (
            datetime.fromisoformat(commit_def["datetime"])
            if "datetime" in commit_def
            else curr_dt
        )
        timestamp = commit_dt.isoformat(timespec="seconds")
        if curr_dt == commit_dt:
            sleep(1)  # ensure commit timestamps are unique
        # pin both author & committer dates so the merge commit is reproducible
        with git_repo.git.custom_environment(
            GIT_AUTHOR_DATE=timestamp,
            GIT_COMMITTER_DATE=timestamp,
        ):
            git_repo.git.merge(
                branch_name,
                ff=fast_forward,
                no_ff=bool(not fast_forward),
                m=commit_def["msg"],
                strategy_option=strategy_option,
            )
        # return the commit definition with the sha & message updated
        return {
            **commit_def,
            "msg": str(git_repo.head.commit.message).strip(),
            "sha": git_repo.head.commit.hexsha,
        }

    return _create_merge_commit


@pytest.fixture(scope="session")
def create_squash_merge_commit(
    stable_now_date: GetStableDateNowFn,
) -> CreateSquashMergeCommitFn:
    """Fixture: squash-merge a branch and commit the staged result."""

    def _create_squash_merge_commit(
        git_repo: Repo,
        branch_name: str,
        commit_def: CommitDef,
        strategy_option: str = DEFAULT_MERGE_STRATEGY_OPTION,
    ) -> CommitDef:
        curr_dt = 
stable_now_date()
        commit_dt = (
            datetime.fromisoformat(commit_def["datetime"])
            if "datetime" in commit_def
            else curr_dt
        )
        if curr_dt == commit_dt:
            sleep(1)  # ensure commit timestamps are unique
        # merge --squash never commits on action, first it stages the changes
        git_repo.git.merge(
            branch_name,
            squash=True,
            strategy_option=strategy_option,
        )
        # commit the squashed changes
        git_repo.git.commit(
            m=commit_def["msg"],
            date=commit_dt.isoformat(timespec="seconds"),
        )
        # return the commit definition with the sha & message updated
        return {
            **commit_def,
            "msg": str(git_repo.head.commit.message).strip(),
            "sha": git_repo.head.commit.hexsha,
        }

    return _create_squash_merge_commit


@pytest.fixture(scope="session")
def create_release_tagged_commit(
    update_pyproject_toml: UpdatePyprojectTomlFn,
    update_version_py_file: UpdateVersionPyFileFn,
    default_tag_format_str: str,
    stable_now_date: GetStableDateNowFn,
) -> CreateReleaseFn:
    """Fixture: reproduce the commit + annotated tag that semantic-release makes.

    Stamps the version into the version file and pyproject.toml, commits all
    changes with the release commit message, then tags the commit.
    """

    def _mimic_semantic_release_commit(
        git_repo: Repo,
        version: Version | str,
        tag_format: str = default_tag_format_str,
        timestamp: DatetimeISOStr | None = None,
        version_py_file: Path | str = "",
        commit_message_format: str = COMMIT_MESSAGE,
    ) -> None:
        curr_dt = stable_now_date()
        commit_dt = (
            datetime.fromisoformat(timestamp)
            if isinstance(timestamp, str)
            else curr_dt
        )
        if curr_dt == commit_dt:
            sleep(1)  # ensure commit timestamps are unique
        # stamp version into version file
        if version_py_file:
            update_version_py_file(version=version, version_file=version_py_file)
        else:
            update_version_py_file(version=version)
        # stamp version into pyproject.toml
        update_pyproject_toml("tool.poetry.version", str(version))
        # commit --all files with version number commit message
        git_repo.git.commit(
            a=True,
            m=commit_message_format.format(version=str(version)),
            date=commit_dt.isoformat(timespec="seconds"),
        )
        # ensure commit timestamps are unique (adding one second even though a nanosecond has gone by)
        commit_dt += timedelta(seconds=1)
        # pin the committer date so the annotated tag's timestamp is reproducible
        with git_repo.git.custom_environment(
            GIT_COMMITTER_DATE=commit_dt.isoformat(timespec="seconds"),
        ):
            # tag commit with version number
            tag_str = tag_format.format(version=str(version))
            git_repo.git.tag(tag_str, m=tag_str)

    return _mimic_semantic_release_commit


@pytest.fixture(scope="session")
def commit_n_rtn_changelog_entry(
    stable_now_date: GetStableDateNowFn,
) -> CommitNReturnChangelogEntryFn:
    """Fixture: commit all staged/tracked changes and return the updated CommitDef."""

    def _commit_n_rtn_changelog_entry(
        git_repo: Repo, commit_def: CommitDef
    ) -> CommitDef:
        # make commit with --all files
        curr_dt = stable_now_date()
        commit_dt = (
            datetime.fromisoformat(commit_def["datetime"])
            if "datetime" in commit_def
            else curr_dt
        )
        if curr_dt == commit_dt:
            sleep(1)  # ensure commit timestamps are unique
        git_repo.git.commit(
            a=True,
            m=commit_def["msg"],
            date=commit_dt.isoformat(timespec="seconds"),
        )
        # Capture the resulting commit message and sha
        return {
            **commit_def,
            "msg": str(git_repo.head.commit.message).strip(),
            "sha": git_repo.head.commit.hexsha,
        }

    return _commit_n_rtn_changelog_entry


@pytest.fixture(scope="session")
def simulate_change_commits_n_rtn_changelog_entry(
    commit_n_rtn_changelog_entry: CommitNReturnChangelogEntryFn,
    file_in_repo: str,
) -> SimulateChangeCommitsNReturnChangelogEntryFn:
    """Fixture: make one commit per CommitDef, touching a file when the tree is clean."""

    def _simulate_change_commits_n_rtn_changelog_entry(
        git_repo: Repo, commit_msgs: Sequence[CommitDef]
    ) -> Sequence[CommitDef]:
        changelog_entries: list[CommitDef] = []
        for commit_msg in commit_msgs:
            # guarantee there is something to commit for each message
            if not git_repo.is_dirty(index=True, working_tree=False):
                add_text_to_file(git_repo, file_in_repo)
            changelog_entries.append(commit_n_rtn_changelog_entry(git_repo, commit_msg))
        return changelog_entries

    return _simulate_change_commits_n_rtn_changelog_entry


@pytest.fixture(scope="session")
def get_hvcs_client_from_repo_def(
    example_git_https_url: str,
    get_cfg_value_from_def: GetCfgValueFromDefFn,
) -> GetHvcsClientFromRepoDefFn:
    """Fixture: resolve the HVCS client class named in a repo build definition."""

    # map of lowercase class name -> HVCS client class
    hvcs_client_classes = {
        Bitbucket.__name__.lower(): Bitbucket,
        Github.__name__.lower(): Github,
        Gitea.__name__.lower(): Gitea,
        Gitlab.__name__.lower(): Gitlab,
    }

    def 
_get_hvcs_client_from_repo_def( repo_def: Sequence[RepoActions], ) -> Github | Gitlab | Gitea | Bitbucket: hvcs_type = get_cfg_value_from_def(repo_def, "hvcs_client_name") hvcs_client_class = hvcs_client_classes[hvcs_type] # Prevent the HVCS client from using the environment variables with mock.patch.dict(os.environ, {}, clear=True): hvcs_client = cast( "HvcsBase", hvcs_client_class( example_git_https_url, hvcs_domain=get_cfg_value_from_def(repo_def, "hvcs_domain"), ), ) # Force the HVCS client to attempt to resolve the repo name (as we generally cache it) assert hvcs_client.repo_name assert hvcs_client.owner return cast("Github | Gitlab | Gitea | Bitbucket", hvcs_client) return _get_hvcs_client_from_repo_def @pytest.fixture(scope="session") def build_configured_base_repo( # noqa: C901 cached_example_git_project: Path, configure_base_repo: BuildRepoFn, ) -> BuildRepoFn: """ This fixture is intended to simplify repo scenario building by initially creating the repo but also configuring semantic_release in the pyproject.toml for when the test executes semantic_release. It returns a function so that derivative fixtures can call this fixture with individual parameters. 
""" def _build_configured_base_repo( # noqa: C901 dest_dir: Path | str, commit_type: CommitConvention = "conventional", hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = True, # Default as of v10 package_name: str = EXAMPLE_PROJECT_NAME, monorepo: bool = False, ) -> tuple[Path, HvcsBase]: if not cached_example_git_project.exists(): raise RuntimeError("Unable to find cached git project files!") # Copy the cached git project the dest directory copy_dir_tree(cached_example_git_project, dest_dir) return configure_base_repo( dest_dir=dest_dir, commit_type=commit_type, hvcs_client_name=hvcs_client_name, hvcs_domain=hvcs_domain, tag_format_str=tag_format_str, extra_configs=extra_configs, mask_initial_release=mask_initial_release, package_name=package_name, monorepo=monorepo, ) return _build_configured_base_repo @pytest.fixture(scope="session") def configure_base_repo( # noqa: C901 use_github_hvcs: UseHvcsFn, use_gitlab_hvcs: UseHvcsFn, use_gitea_hvcs: UseHvcsFn, use_bitbucket_hvcs: UseHvcsFn, use_conventional_parser: UseParserFn, use_emoji_parser: UseParserFn, use_scipy_parser: UseParserFn, use_custom_parser: UseCustomParserFn, example_git_https_url: str, update_pyproject_toml: UpdatePyprojectTomlFn, get_wheel_file: GetWheelFileFn, pyproject_toml_file: Path, get_hvcs: GetHvcsFn, ) -> BuildRepoFn: """ This fixture is intended to simplify repo scenario building by initially creating the repo but also configuring semantic_release in the pyproject.toml for when the test executes semantic_release. It returns a function so that derivative fixtures can call this fixture with individual parameters. 
""" def _configure_base_repo( # noqa: C901 dest_dir: Path | str, commit_type: str = "conventional", hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = True, # Default as of v10 package_name: str = EXAMPLE_PROJECT_NAME, monorepo: bool = False, ) -> tuple[Path, HvcsBase]: # Make sure we are in the dest directory with temporary_working_directory(dest_dir): # Set parser configuration if commit_type == "conventional": use_conventional_parser( toml_file=pyproject_toml_file, monorepo=monorepo ) elif commit_type == "emoji": use_emoji_parser(toml_file=pyproject_toml_file, monorepo=monorepo) elif commit_type == "scipy": use_scipy_parser(toml_file=pyproject_toml_file, monorepo=monorepo) else: use_custom_parser(commit_type, toml_file=pyproject_toml_file) # Set HVCS configuration if hvcs_client_name == "github": use_github_hvcs(hvcs_domain, toml_file=pyproject_toml_file) elif hvcs_client_name == "gitlab": use_gitlab_hvcs(hvcs_domain, toml_file=pyproject_toml_file) elif hvcs_client_name == "gitea": use_gitea_hvcs(hvcs_domain, toml_file=pyproject_toml_file) elif hvcs_client_name == "bitbucket": use_bitbucket_hvcs(hvcs_domain, toml_file=pyproject_toml_file) else: raise ValueError(f"Unknown HVCS client name: {hvcs_client_name}") # Create HVCS Client instance hvcs = get_hvcs( hvcs_client_name=hvcs_client_name, origin_url=example_git_https_url, hvcs_domain=hvcs_domain, ) # Set tag format in configuration if tag_format_str is not None: update_pyproject_toml( "tool.semantic_release.tag_format", tag_format_str, toml_file=pyproject_toml_file, ) # Set the build_command to create a wheel file (using the build_command_env version variable) build_result_file = ( get_wheel_file("$NEW_VERSION", pkg_name=package_name) if sys.platform != "win32" else get_wheel_file("$Env:NEW_VERSION", pkg_name=package_name) ) update_pyproject_toml( # NOTE: must 
work in both bash and Powershell "tool.semantic_release.build_command", # NOTE: we are trying to ensure a few non-file-path characters are removed, but this is not # the equivalent of a cononcial version translator, so it may not work in all cases dedent( f"""\ mkdir -p "{build_result_file.parent}" WHEEL_FILE="$(printf '%s' "{build_result_file}" | sed 's/+/./g')" touch "$WHEEL_FILE" """ if sys.platform != "win32" else f"""\ mkdir {build_result_file.parent} > $null $WHEEL_FILE = "{build_result_file}".Replace('+', '.') New-Item -ItemType file -Path "$WHEEL_FILE" -Force | Select-Object OriginalPath """ ), toml_file=pyproject_toml_file, ) # Set whether or not the initial release should be masked update_pyproject_toml( "tool.semantic_release.changelog.default_templates.mask_initial_release", mask_initial_release, toml_file=pyproject_toml_file, ) # Apply configurations to pyproject.toml if extra_configs is not None: for key, value in extra_configs.items(): update_pyproject_toml(key, value, toml_file=pyproject_toml_file) return Path(dest_dir), hvcs return _configure_base_repo @pytest.fixture(scope="session") def separate_squashed_commit_def() -> SeparateSquashedCommitDefFn: # default_conventional_parser: ConventionalCommitParser, # default_emoji_parser: EmojiCommitParser, # default_scipy_parser: ScipyCommitParser, # message_parsers: dict[ # CommitConvention, # ConventionalCommitParser | EmojiCommitParser | ScipyCommitParser, # ] = { # "conventional": ConventionalCommitParser( # options=ConventionalCommitParserOptions( # **{ # **default_conventional_parser.options.__dict__, # "parse_squash_commits": True, # } # ) # ), # "emoji": EmojiCommitParser( # options=EmojiParserOptions( # **{ # **default_emoji_parser.options.__dict__, # "parse_squash_commits": True, # } # ) # ), # "scipy": ScipyCommitParser( # options=ScipyParserOptions( # **{ # **default_scipy_parser.options.__dict__, # "parse_squash_commits": True, # } # ) # ), # } def _separate_squashed_commit_def( 
squashed_commit_def: CommitDef, parser: SquashedCommitSupportedParser, ) -> list[CommitDef]: if not hasattr(parser, "unsquash_commit_message"): return [squashed_commit_def] unsquashed_messages = parser.unsquash_commit_message( message=squashed_commit_def["msg"] ) commit_num_gen = (i for i in count(start=1, step=1)) return [ { "cid": f"{squashed_commit_def['cid']}-{next(commit_num_gen)}", "msg": squashed_message, "type": parsed_result.type, "category": parsed_result.category, "desc": str.join("\n\n", parsed_result.descriptions), "brking_desc": str.join("\n\n", parsed_result.breaking_descriptions), "scope": parsed_result.scope, "mr": parsed_result.linked_merge_request or squashed_commit_def["mr"], "sha": squashed_commit_def["sha"], "include_in_changelog": True, "datetime": squashed_commit_def.get("datetime", ""), } for parsed_result, squashed_message in iter( (parser.parse_message(squashed_msg), squashed_msg) for squashed_msg in unsquashed_messages ) if parsed_result is not None ] return _separate_squashed_commit_def @pytest.fixture(scope="session") def convert_commit_spec_to_commit_def( get_commit_def_of_conventional_commit: GetCommitDefFn[ConventionalCommitParser], get_commit_def_of_conventional_commit_monorepo: GetCommitDefFn[ ConventionalCommitMonorepoParser ], get_commit_def_of_emoji_commit: GetCommitDefFn[EmojiCommitParser], get_commit_def_of_scipy_commit: GetCommitDefFn[ScipyCommitParser], stable_now_date: datetime, ) -> ConvertCommitSpecToCommitDefFn: message_parsers = { "conventional": get_commit_def_of_conventional_commit, "conventional-monorepo": get_commit_def_of_conventional_commit_monorepo, "emoji": get_commit_def_of_emoji_commit, "scipy": get_commit_def_of_scipy_commit, } def _convert( commit_spec: CommitSpec, commit_type: CommitConvention, parser: CommitParser[ParseResult, ParserOptions], monorepo: bool = False, ) -> CommitDef: parse_msg_fn = cast( "GetCommitDefFn[Any]", message_parsers[f"{commit_type}{'-monorepo' if monorepo else ''}"], ) # Extract 
the correct commit message for the commit type return { **parse_msg_fn(commit_spec[commit_type], parser=parser), "cid": commit_spec["cid"], "datetime": ( commit_spec["datetime"] if "datetime" in commit_spec else stable_now_date.isoformat(timespec="seconds") ), "include_in_changelog": (commit_spec.get("include_in_changelog", True)), } return _convert @pytest.fixture(scope="session") def convert_commit_specs_to_commit_defs( convert_commit_spec_to_commit_def: ConvertCommitSpecToCommitDefFn, ) -> ConvertCommitSpecsToCommitDefsFn: def _convert( commits: Sequence[CommitSpec], commit_type: CommitConvention, parser: CommitParser[ParseResult, ParserOptions], monorepo: bool = False, ) -> Sequence[CommitDef]: return [ convert_commit_spec_to_commit_def( commit, commit_type, parser=parser, monorepo=monorepo ) for commit in commits ] return _convert @pytest.fixture(scope="session") def build_repo_from_definition( # noqa: C901, its required and its just test code build_configured_base_repo: BuildRepoFn, build_base_monorepo: BuildMonorepoFn, configure_monorepo_package: BuildRepoFn, default_tag_format_str: str, create_release_tagged_commit: CreateReleaseFn, create_squash_merge_commit: CreateSquashMergeCommitFn, create_merge_commit: CreateMergeCommitFn, simulate_change_commits_n_rtn_changelog_entry: SimulateChangeCommitsNReturnChangelogEntryFn, simulate_default_changelog_creation: SimulateDefaultChangelogCreationFn, separate_squashed_commit_def: SeparateSquashedCommitDefFn, get_hvcs: GetHvcsFn, example_git_https_url: str, get_parser_from_config_file: GetParserFromConfigFileFn, pyproject_toml_file: Path, ) -> BuildRepoFromDefinitionFn: def expand_repo_construction_steps( acc: Sequence[RepoActions], step: RepoActions ) -> Sequence[RepoActions]: empty_tuple = cast("tuple[RepoActions, ...]", ()) unpacked_pre_actions = reduce( expand_repo_construction_steps, # type: ignore[arg-type] step["details"].pop("pre_actions", empty_tuple), empty_tuple, ) unpacked_post_actions = reduce( 
expand_repo_construction_steps, # type: ignore[arg-type] step["details"].pop("post_actions", empty_tuple), empty_tuple, ) return (*acc, *unpacked_pre_actions, step, *unpacked_post_actions) def _build_repo_from_definition( # noqa: C901, its required and its just test code dest_dir: Path | str, repo_construction_steps: Sequence[RepoActions] ) -> Sequence[RepoActions]: completed_repo_steps: list[RepoActions] = [] expanded_repo_construction_steps: tuple[RepoActions, ...] = tuple( reduce( expand_repo_construction_steps, # type: ignore[arg-type] repo_construction_steps, (), ) ) repo_dir = Path(dest_dir).resolve().absolute() commit_type: CommitConvention = "conventional" hvcs: Github | Gitlab | Gitea | Bitbucket commit_cache: dict[str, CommitDef] = {} current_repo_def: dict[Version | Literal["Unreleased"], RepoVersionDef] = {} with temporary_working_directory(repo_dir): for step in expanded_repo_construction_steps: step_result = deepcopy(step) action = step["action"] if action == RepoActionStep.CONFIGURE: cfg_def = cast("RepoActionConfigureDetails", step_result["details"]) # Make sure the resulting build definition is complete with the default cfg_def["tag_format_str"] = ( cfg_def["tag_format_str"] or default_tag_format_str ) _, hvcs = build_configured_base_repo( # type: ignore[assignment] # TODO: fix the type error dest_dir, **{ key: cfg_def[key] # type: ignore[literal-required] for key in [ "commit_type", "hvcs_client_name", "hvcs_domain", "tag_format_str", "mask_initial_release", "extra_configs", ] }, ) elif action == RepoActionStep.CREATE_MONOREPO: cfg_mr_def = cast( "RepoActionCreateMonorepoDetails", step_result["details"] ) build_base_monorepo(dest_dir=repo_dir) hvcs = get_hvcs( hvcs_client_name=cfg_mr_def["hvcs_client_name"], origin_url=cfg_mr_def.get("origin_url") or example_git_https_url, hvcs_domain=cfg_mr_def["hvcs_domain"], ) commit_type = cfg_mr_def["commit_type"] elif action == RepoActionStep.CONFIGURE_MONOREPO: cfg_mr_pkg_def = cast( 
"RepoActionConfigureMonorepoDetails", step_result["details"] ) configure_monorepo_package( dest_dir=cfg_mr_pkg_def["package_dir"], commit_type=commit_type, hvcs_client_name=hvcs.__class__.__name__.lower(), hvcs_domain=str(hvcs.hvcs_domain), tag_format_str=cfg_mr_pkg_def["tag_format_str"], extra_configs=cfg_mr_pkg_def["extra_configs"], mask_initial_release=cfg_mr_pkg_def["mask_initial_release"], package_name=cfg_mr_pkg_def["package_name"], monorepo=True, ) elif action == RepoActionStep.CHANGE_DIRECTORY: change_dir_def = cast( "RepoActionChangeDirectoryDetails", step_result["details"] ) if not ( new_cwd := Path(change_dir_def["directory"]) .resolve() .absolute() ).exists(): msg = f"Directory {change_dir_def['directory']} does not exist." raise NotADirectoryError(msg) # Helpful Transform to find the project root repo without needing to pass it around (ie '/' => repo_dir) new_cwd = ( repo_dir if str(new_cwd) == str(repo_dir.root) else new_cwd ) if not new_cwd.is_dir(): msg = f"Path {change_dir_def['directory']} is not a directory." raise NotADirectoryError(msg) # TODO: 3.9+, use is_relative_to # if not new_cwd.is_relative_to(repo_dir): if repo_dir != new_cwd and repo_dir not in new_cwd.parents: msg = f"Cannot change directory to '{new_cwd}' as it is outside the repo directory '{repo_dir}'." raise ValueError(msg) os.chdir(str(new_cwd)) elif action == RepoActionStep.MAKE_COMMITS: mk_cmts_def = cast( "RepoActionMakeCommitsDetails", step_result["details"] ) # update the commit definitions with the repo hashes with Repo(repo_dir) as git_repo: mk_cmts_def["commits"] = ( simulate_change_commits_n_rtn_changelog_entry( git_repo, mk_cmts_def["commits"], ) ) for commit in mk_cmts_def["commits"]: if commit["cid"] in commit_cache: raise ValueError( f"Duplicate commit id '{commit['cid']}' detected!" 
) commit_cache.update({commit["cid"]: commit}) elif action == RepoActionStep.WRITE_CHANGELOGS: w_chlgs_def = cast( "RepoActionWriteChangelogsDetails", step["details"] ) # Mark the repo definition with the latest stored commits for the upcoming release new_version = w_chlgs_def["new_version"] current_repo_def.update( { new_version: { "commits": [ *filter( None, ( cmt for commit_id in w_chlgs_def["commit_ids"] if (cmt := commit_cache[commit_id])[ "include_in_changelog" ] ), ) ] } } ) # in order to support monorepo changelogs we must filter and map the stored repo definition # to match only the sub-package's versions which are identified by matching tag formats filtered_repo_def_4_changelog: RepoDefinition = { str(version): repo_def for version, repo_def in current_repo_def.items() if ( isinstance(version, Version) and isinstance(new_version, Version) and version.tag_format == new_version.tag_format ) or version == new_version } # Write each changelog with the current repo definition with Repo(repo_dir) as git_repo: for changelog_file_def in w_chlgs_def["dest_files"]: changelog_file = repo_dir.joinpath( changelog_file_def["path"] ) simulate_default_changelog_creation( filtered_repo_def_4_changelog, hvcs=hvcs, dest_file=changelog_file, output_format=changelog_file_def["format"], mask_initial_release=changelog_file_def[ "mask_initial_release" ], max_version=w_chlgs_def.get("max_version"), ) git_repo.git.add(str(changelog_file), force=True) elif action == RepoActionStep.RELEASE: release_def = cast("RepoActionReleaseDetails", step["details"]) with Repo(repo_dir) as git_repo: create_release_tagged_commit( git_repo, version=release_def["version"], tag_format=release_def.get( "tag_format", default_tag_format_str ), timestamp=release_def["datetime"], version_py_file=release_def.get("version_py_file", ""), commit_message_format=release_def.get( "commit_message_format", COMMIT_MESSAGE ), ) elif action == RepoActionStep.GIT_CHECKOUT: ckout_def = 
cast("RepoActionGitCheckoutDetails", step["details"]) with Repo(repo_dir) as git_repo: if "create_branch" in ckout_def: create_branch_def = ckout_def["create_branch"] start_head = git_repo.heads[ create_branch_def["start_branch"] ] new_branch_head = git_repo.create_head( create_branch_def["name"], commit=start_head.commit, ) new_branch_head.checkout() elif "branch" in ckout_def: git_repo.heads[ckout_def["branch"]].checkout() elif action == RepoActionStep.GIT_SQUASH: squash_def = cast( "RepoActionGitSquashDetails", step_result["details"] ) # Update the commit definition with the repo hash with Repo(repo_dir) as git_repo: squash_def["commit_def"] = create_squash_merge_commit( git_repo=git_repo, branch_name=squash_def["branch"], commit_def=squash_def["commit_def"], strategy_option=squash_def["strategy_option"], ) if ( first_cid := f"{squash_def['commit_def']['cid']}-1" ) in commit_cache: raise ValueError( f"Duplicate commit id '{first_cid}' detected!" ) commit_cache.update( { squashed_commit_def["cid"]: squashed_commit_def for squashed_commit_def in separate_squashed_commit_def( squashed_commit_def=squash_def["commit_def"], parser=cast( "SquashedCommitSupportedParser", get_parser_from_config_file( file=squash_def.get( "config_file", pyproject_toml_file ), ), ), ) } ) elif action == RepoActionStep.GIT_MERGE: this_step = cast( "RepoActionGitMerge[RepoActionGitFFMergeDetails | RepoActionGitMergeDetails]", step_result, ) with Repo(repo_dir) as git_repo: if this_step["details"]["fast_forward"]: git_repo.git.merge( this_step["details"]["branch_name"], ff=True ) else: merge_def = this_step["details"] # Update the commit definition with the repo hash merge_def["commit_def"] = create_merge_commit( git_repo=git_repo, branch_name=merge_def["branch_name"], commit_def=merge_def["commit_def"], fast_forward=merge_def["fast_forward"], strategy_option=merge_def.get( "strategy_option", DEFAULT_MERGE_STRATEGY_OPTION ), ) if merge_def["commit_def"]["cid"] in commit_cache: raise 
ValueError( f"Duplicate commit id '{merge_def['commit_def']['cid']}' detected!" ) commit_cache.update( { merge_def["commit_def"]["cid"]: merge_def[ "commit_def" ] } ) else: raise ValueError(f"Unknown action: {action}") completed_repo_steps.append(step_result) return completed_repo_steps return _build_repo_from_definition @pytest.fixture(scope="session") def get_cfg_value_from_def() -> GetCfgValueFromDefFn: def _get_cfg_value_from_def( build_definition: Sequence[RepoActions], key: str ) -> Any: configure_steps = [ step for step in build_definition if step["action"] == RepoActionStep.CONFIGURE ] for step in configure_steps[::-1]: if key in step["details"]: return step["details"][key] # type: ignore[literal-required] raise ValueError(f"Unable to find configuration key: {key}") return _get_cfg_value_from_def @pytest.fixture(scope="session") def get_versions_from_repo_build_def( default_tag_format_str: str, ) -> GetVersionsFromRepoBuildDefFn: def _get_versions(repo_def: Sequence[RepoActions]) -> Sequence[Version]: return [ Version.parse( step["details"]["version"], tag_format=step["details"].get("tag_format", default_tag_format_str), ) for step in repo_def if step["action"] == RepoActionStep.RELEASE ] return _get_versions @pytest.fixture(scope="session") def get_commits_from_repo_build_def() -> GetCommitsFromRepoBuildDefFn: def _get_commits( build_definition: Sequence[RepoActions], filter_4_changelog: bool = False, ignore_merge_commits: bool = False, ) -> RepoDefinition: # Extract the commits from the build definition repo_def: RepoDefinition = {} commits: list[CommitDef] = [] for build_step in build_definition: if build_step["action"] == RepoActionStep.MAKE_COMMITS: commits_made = deepcopy(build_step["details"]["commits"]) if filter_4_changelog: commits_made = list( filter( lambda commit: commit["include_in_changelog"], commits_made ) ) commits.extend(commits_made) elif any( ( build_step["action"] == RepoActionStep.GIT_SQUASH, build_step["action"] == 
RepoActionStep.GIT_MERGE, ) ): if "commit_def" in build_step["details"]: commit_def = build_step["details"]["commit_def"] # type: ignore[typeddict-item] if any( ( ignore_merge_commits and build_step["action"] == RepoActionStep.GIT_MERGE, filter_4_changelog and not commit_def["include_in_changelog"], ) ): continue commits.append(commit_def) elif build_step["action"] == RepoActionStep.RELEASE: version = build_step["details"]["version"] repo_def[version] = {"commits": [*commits]} commits.clear() # Any remaining commits are considered unreleased if len(commits) > 0: repo_def["Unreleased"] = {"commits": [*commits]} return repo_def return _get_commits @pytest.fixture(scope="session") def split_repo_actions_by_release_tags( get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, ) -> SplitRepoActionsByReleaseTagsFn: def _split_repo_actions_by_release_tags( repo_definition: Sequence[RepoActions], ) -> dict[Version | Literal["Unreleased"] | None, list[RepoActions]]: releasetags_2_steps: dict[ Version | Literal["Unreleased"] | None, list[RepoActions] ] = { None: [], } # Create generator for next release tags next_release_tag_gen = ( version for version in get_versions_from_repo_build_def(repo_definition) ) # initialize the first release tag curr_release_tag: Version | Literal["Unreleased"] = next(next_release_tag_gen) releasetags_2_steps[curr_release_tag] = [] # Loop through all actions and split them by release tags for step in repo_definition: if any( step["action"] == action for action in [ RepoActionStep.CONFIGURE, RepoActionStep.CREATE_MONOREPO, RepoActionStep.CONFIGURE_MONOREPO, ] ): releasetags_2_steps[None].append(step) continue if step["action"] == RepoActionStep.WRITE_CHANGELOGS: continue releasetags_2_steps[curr_release_tag].append(step) if step["action"] == RepoActionStep.RELEASE: try: curr_release_tag = next(next_release_tag_gen) releasetags_2_steps[curr_release_tag] = [] except StopIteration: curr_release_tag = "Unreleased" 
releasetags_2_steps[curr_release_tag] = [] insignificant_actions = [ RepoActionStep.GIT_CHECKOUT, RepoActionStep.CHANGE_DIRECTORY, ] # Remove Unreleased if there are no significant steps in an Unreleased section if "Unreleased" in releasetags_2_steps and not [ step for step in releasetags_2_steps["Unreleased"] if step["action"] not in insignificant_actions ]: del releasetags_2_steps["Unreleased"] # Return all actions split up by release tags return releasetags_2_steps return _split_repo_actions_by_release_tags @pytest.fixture(scope="session") def simulate_default_changelog_creation( # noqa: C901 default_md_changelog_insertion_flag: str, default_rst_changelog_insertion_flag: str, today_date_str: str, ) -> SimulateDefaultChangelogCreationFn: def reduce_repo_def( acc: BaseAccumulatorVersionReduction, ver_2_def: tuple[str, RepoVersionDef] ) -> BaseAccumulatorVersionReduction: version_str, version_def = ver_2_def if Version.parse(version_str) <= acc["version_limit"]: acc["repo_def"][version_str] = version_def return acc def build_version_entry_markdown( version: VersionStr, version_def: RepoVersionDef, hvcs: Github | Gitlab | Gitea | Bitbucket, ) -> str: version_entry = [ f"## {version}\n" if version == "Unreleased" else f"## v{version} ({today_date_str})\n" ] changelog_sections = sorted( {commit["category"] for commit in version_def["commits"]} ) brking_descriptions = [] for section in changelog_sections: # Create Markdown section heading section_title = section.title() if not section.startswith(":") else section version_entry.append(f"### {section_title}\n") commits: list[CommitDef] = list( filter( lambda commit, section=section: ( # type: ignore[arg-type] commit["category"] == section ), version_def["commits"], ) ) section_bullets = [] # format each commit for commit_def in commits: descriptions = commit_def["desc"].split("\n\n") if commit_def["brking_desc"]: brking_descriptions.append( "- {commit_scope}{brk_desc}".format( commit_scope=( f"**{commit_def['scope']}**: 
" if commit_def["scope"] else "" ), brk_desc=commit_def["brking_desc"].capitalize(), ) ) # NOTE: We have to be wary of the line length as the default changelog # has a 100 character limit or otherwise our tests will fail because the # URLs and whitespace don't line up subject_line = "- {commit_scope}{commit_desc}".format( commit_desc=descriptions[0].capitalize(), commit_scope=( f"**{commit_def['scope']}**: " if commit_def["scope"] else "" ), ) mr_link = ( "" if not commit_def["mr"] else "([{mr}]({mr_url}),".format( mr=commit_def["mr"], mr_url=hvcs.pull_request_url(commit_def["mr"]), ) ) sha_link = "[`{short_sha}`]({commit_url}))".format( short_sha=commit_def["sha"][:7], commit_url=hvcs.commit_hash_url(commit_def["sha"]), ) # Add opening parenthesis if no MR link sha_link = sha_link if mr_link else f"({sha_link}" # NOTE: we are assuming that the subject line is always less than 100 characters commit_cl_desc = f"{subject_line} {mr_link}".rstrip() if len(commit_cl_desc) > 100: commit_cl_desc = f"{subject_line}\n {mr_link}".rstrip() if len(f"{commit_cl_desc} {sha_link}") > 100: commit_cl_desc = f"{commit_cl_desc}\n {sha_link}\n" else: commit_cl_desc = f"{commit_cl_desc} {sha_link}\n" # COMMENTED out for v10 as the defualt changelog now only writes the subject line # if len(descriptions) > 1: # commit_cl_desc += ( # "\n" + str.join("\n\n", [*descriptions[1:]]) + "\n" # ) # Add commits to section if commit_cl_desc not in section_bullets: section_bullets.append(commit_cl_desc) version_entry.extend(sorted(section_bullets)) # Add breaking changes to the end of the version entry if brking_descriptions: version_entry.append("### Breaking Changes\n") version_entry.extend([*sorted(brking_descriptions), ""]) return str.join("\n", version_entry) def build_version_entry_restructured_text( version: VersionStr, version_def: RepoVersionDef, hvcs: Github | Gitlab | Gitea | Bitbucket, ) -> str: version_entry = [ ( ".. _changelog-unreleased:" if version == "Unreleased" else f".. 
_changelog-v{version}:" ), "", ( f"{version}" if version == "Unreleased" else f"v{version} ({today_date_str})" ), ] version_entry.append("=" * len(version_entry[-1])) version_entry.append("") # Add newline changelog_sections = sorted( {commit["category"] for commit in version_def["commits"]} ) brking_descriptions = [] urls = [] for section in changelog_sections: # Create RestructuredText section heading section_title = section.title() if not section.startswith(":") else section version_entry.append(f"{section_title}") version_entry.append("-" * (len(version_entry[-1])) + "\n") # Filter commits by section commits: list[CommitDef] = list( filter( lambda commit, section=section: ( # type: ignore[arg-type] commit["category"] == section ), version_def["commits"], ) ) section_bullets = [] for commit_def in commits: descriptions = commit_def["desc"].split("\n\n") if commit_def["brking_desc"]: brking_descriptions.append( "* {commit_scope}{brk_desc}".format( commit_scope=( f"**{commit_def['scope']}**: " if commit_def["scope"] else "" ), brk_desc=commit_def["brking_desc"].capitalize(), ) ) # NOTE: We have to be wary of the line length as the default changelog # has a 100 character limit or otherwise our tests will fail because the # URLs and whitespace don't line up subject_line = "* {commit_scope}{commit_desc}".format( commit_desc=descriptions[0].capitalize(), commit_scope=( f"**{commit_def['scope']}**: " if commit_def["scope"] else "" ), ) mr_link = ( "" if not commit_def["mr"] else "(`{mr}`_,".format( mr=commit_def["mr"], ) ) sha_link = "`{short_sha}`_)".format( short_sha=commit_def["sha"][:7], ) # Add opening parenthesis if no MR link sha_link = sha_link if mr_link else f"({sha_link}" # NOTE: we are assuming that the subject line is always less than 100 characters commit_cl_desc = f"{subject_line} {mr_link}".rstrip() if len(commit_cl_desc) > 100: commit_cl_desc = f"{subject_line}\n {mr_link}".rstrip() if len(f"{commit_cl_desc} {sha_link}") > 100: commit_cl_desc = 
f"{commit_cl_desc}\n {sha_link}\n" else: commit_cl_desc = f"{commit_cl_desc} {sha_link}\n" # COMMENTED out for v10 as the defualt changelog now only writes the subject line # if len(descriptions) > 1: # commit_cl_desc += ( # "\n" + str.join("\n\n", [*descriptions[1:]]) + "\n" # ) # Add commits to section if commit_cl_desc not in section_bullets: section_bullets.append(commit_cl_desc) version_entry.extend(sorted(section_bullets)) urls.extend( [ *[ ".. _{mr}: {mr_url}".format( mr=commit_def["mr"], mr_url=hvcs.pull_request_url(commit_def["mr"]), ) for commit_def in commits if commit_def["mr"] ], *[ ".. _{short_sha}: {commit_url}".format( short_sha=commit_def["sha"][:7], commit_url=hvcs.commit_hash_url(commit_def["sha"]), ) for commit_def in commits ], ] ) # Add breaking changes to the end of the version entry if brking_descriptions: version_entry.append("Breaking Changes") version_entry.append("-" * len(version_entry[-1]) + "\n") version_entry.extend([*sorted(brking_descriptions), ""]) # Add commit URLs to the end of the version entry version_entry.extend(sorted(set(urls))) if version_entry[-1] == "": version_entry.pop() return str.join("\n", version_entry) + "\n" def build_version_entry( version: VersionStr, version_def: RepoVersionDef, output_format: ChangelogOutputFormat, hvcs: Github | Gitlab | Gitea | Bitbucket, ) -> str: output_functions = { ChangelogOutputFormat.MARKDOWN: build_version_entry_markdown, ChangelogOutputFormat.RESTRUCTURED_TEXT: build_version_entry_restructured_text, } return output_functions[output_format](version, version_def, hvcs) def build_initial_version_entry( version: VersionStr, version_def: RepoVersionDef, output_format: ChangelogOutputFormat, hvcs: Github | Gitlab | Gitea | Bitbucket, ) -> str: if output_format == ChangelogOutputFormat.MARKDOWN: return str.join( "\n", [ f"## v{version} ({today_date_str})", "", "- Initial Release", "", ], ) if output_format == ChangelogOutputFormat.RESTRUCTURED_TEXT: title = f"v{version} 
({today_date_str})" return str.join( "\n", [ f".. _changelog-v{version}:", "", title, "=" * len(title), "", "* Initial Release", "", ], ) raise ValueError(f"Unknown output format: {output_format}") def _mimic_semantic_release_default_changelog( repo_definition: RepoDefinition, hvcs: Github | Gitlab | Gitea | Bitbucket, dest_file: Path | None = None, max_version: Version | Literal["Unreleased"] | None = None, output_format: ChangelogOutputFormat = ChangelogOutputFormat.MARKDOWN, mask_initial_release: bool = True, # Default as of v10 ) -> str: if output_format == ChangelogOutputFormat.MARKDOWN: header = dedent( f"""\ # CHANGELOG {default_md_changelog_insertion_flag} """ ).rstrip() elif output_format == ChangelogOutputFormat.RESTRUCTURED_TEXT: universal_newline_insertion_flag = ( default_rst_changelog_insertion_flag.replace("\r", "") ) header = str.join( "\n\n", [ dedent( """\ .. _changelog: ========= CHANGELOG ========= """ ).rstrip(), universal_newline_insertion_flag, ], ) else: raise ValueError(f"Unknown output format: {output_format}") version_entries: list[str] = [] repo_def: RepoDefinition = ( repo_definition # type: ignore[assignment] if max_version is None else reduce( reduce_repo_def, # type: ignore[arg-type] repo_definition.items(), { "version_limit": max_version, "repo_def": {}, }, )["repo_def"] ) for i, (version, version_def) in enumerate(repo_def.items()): # prepend entries to force reverse ordering entry = ( build_initial_version_entry(version, version_def, output_format, hvcs) if i == 0 and mask_initial_release and version != "Unreleased" else build_version_entry(version, version_def, output_format, hvcs) ) version_entries.insert(0, entry) changelog_content = ( str.join( "\n" * 2, [header, str.join("\n" * 2, list(version_entries))] ).rstrip() + "\n" ) if dest_file is not None: # Converts uninversal newlines to the OS-specific upon write dest_file.write_text(changelog_content) return changelog_content return _mimic_semantic_release_default_changelog 
@pytest.fixture(scope="session") def generate_default_release_notes_from_def( # noqa: C901 today_date_str: str, get_commits_from_repo_build_def: GetCommitsFromRepoBuildDefFn, ) -> GenerateDefaultReleaseNotesFromDefFn: def build_version_entry_markdown( version: VersionStr, version_def: RepoVersionDef, hvcs: Github | Gitlab | Gitea | Bitbucket, license_name: str, ) -> str: version_entry = [ f"## v{version} ({today_date_str})", *( [""] if not license_name else [ "", f"_This release is published under the {license_name} License._", "", ] ), ] changelog_sections = sorted( {commit["category"] for commit in version_def["commits"]} ) brking_descriptions = [] for section in changelog_sections: # Create Markdown section heading section_title = section.title() if not section.startswith(":") else section version_entry.append(f"### {section_title}\n") commits: list[CommitDef] = list( filter( lambda commit, section=section: ( # type: ignore[arg-type] commit["category"] == section ), version_def["commits"], ) ) section_bullets = [] # format each commit for commit_def in commits: descriptions = commit_def["desc"].split("\n\n") if commit_def["brking_desc"]: brking_descriptions.append( "- {commit_scope}{brk_desc}".format( commit_scope=( f"**{commit_def['scope']}**: " if commit_def["scope"] else "" ), brk_desc=commit_def["brking_desc"].capitalize(), ) ) # NOTE: During release notes, we make the line length very large as the VCS # will handle the line wrapping for us so here we don't have to worry about it max_line_length = 1000 subject_line = "- {commit_scope}{commit_desc}".format( commit_desc=descriptions[0].capitalize(), commit_scope=( f"**{commit_def['scope']}**: " if commit_def["scope"] else "" ), ) mr_link = ( "" if not commit_def["mr"] else "([{mr}]({mr_url}),".format( mr=commit_def["mr"], mr_url=hvcs.pull_request_url(commit_def["mr"]), ) ) sha_link = "[`{short_sha}`]({commit_url}))".format( short_sha=commit_def["sha"][:7], commit_url=hvcs.commit_hash_url(commit_def["sha"]), ) 
# Add opening parenthesis if no MR link sha_link = sha_link if mr_link else f"({sha_link}" commit_cl_desc = f"{subject_line} {mr_link}".rstrip() if len(commit_cl_desc) > max_line_length: commit_cl_desc = f"{subject_line}\n {mr_link}".rstrip() if len(f"{commit_cl_desc} {sha_link}") > max_line_length: commit_cl_desc = f"{commit_cl_desc}\n {sha_link}\n" else: commit_cl_desc = f"{commit_cl_desc} {sha_link}\n" # NOTE: remove this when we no longer are writing the whole commit msg (squash commits enabled) # if len(descriptions) > 1: # commit_cl_desc += ( # "\n" + str.join("\n\n", [*descriptions[1:]]) + "\n" # ) # Add commits to section section_bullets.append(commit_cl_desc) version_entry.extend(sorted(section_bullets)) # Add breaking changes to the end of the version entry if brking_descriptions: version_entry.append("### Breaking Changes\n") version_entry.extend([*sorted(brking_descriptions), ""]) return str.join("\n", version_entry) def build_initial_version_entry_markdown( version: VersionStr, license_name: str = "", ) -> str: return str.join( "\n", [ f"## v{version} ({today_date_str})", *( [""] if not license_name else [ "", f"_This release is published under the {license_name} License._", "", ] ), "- Initial Release", "", ], ) def _generate_default_release_notes( version_actions: Sequence[RepoActions], hvcs: Github | Gitlab | Gitea | Bitbucket, previous_version: Version | None = None, license_name: str = "", dest_file: Path | None = None, mask_initial_release: bool = True, # Default as of v10 ) -> str: limited_repo_def: RepoDefinition = get_commits_from_repo_build_def( build_definition=version_actions, filter_4_changelog=True, ) version: Version = Version.parse(next(iter(limited_repo_def.keys()))) version_def: RepoVersionDef = limited_repo_def[str(version)] release_notes_content = ( str.join( "\n" * 2, [ ( build_initial_version_entry_markdown(str(version), license_name) if mask_initial_release and not previous_version else build_version_entry_markdown( str(version), 
version_def, hvcs, license_name ) ).rstrip(), *( [ "---", "**Detailed Changes**: [{prev_version}...{new_version}]({version_compare_url})".format( prev_version=previous_version.as_tag(), new_version=version.as_tag(), version_compare_url=hvcs.compare_url( previous_version.as_tag(), version.as_tag() ), ), ] if previous_version and not isinstance(hvcs, Gitea) else [] ), ], ).rstrip() + "\n" ) if dest_file is not None: # Converts universal newlines to the OS-specific upon write dest_file.write_text(release_notes_content) # match the line endings of the current OS return ( str.join(os.linesep, release_notes_content.splitlines(keepends=False)) + os.linesep ) return _generate_default_release_notes @pytest.fixture def git_repo_for_directory() -> Generator[GetGitRepo4DirFn, None, None]: repos: list[Repo] = [] # Must be a callable function to ensure files exist before repo is opened def _git_repo_4_dir(directory: Path | str) -> Repo: if not Path(directory).exists(): raise RuntimeError("Unable to find git project!") repo = Repo(directory) repos.append(repo) return repo try: yield _git_repo_4_dir finally: for repo in repos: repo.close() @pytest.fixture def example_project_git_repo( example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, ) -> ExProjectGitRepoFn: def _example_project_git_repo() -> Repo: return git_repo_for_directory(example_project_dir) return _example_project_git_repo python-semantic-release-10.4.1/tests/fixtures/monorepos/000077500000000000000000000000001506116242600233745ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/fixtures/monorepos/__init__.py000066400000000000000000000003261506116242600255060ustar00rootroot00000000000000from tests.fixtures.monorepos.example_monorepo import * from tests.fixtures.monorepos.git_monorepo import * from tests.fixtures.monorepos.github_flow import * from tests.fixtures.monorepos.trunk_based_dev import * 
python-semantic-release-10.4.1/tests/fixtures/monorepos/example_monorepo.py000066400000000000000000000367371506116242600273370ustar00rootroot00000000000000from __future__ import annotations from pathlib import Path from textwrap import dedent from typing import TYPE_CHECKING import pytest # NOTE: use backport with newer API import tests.conftest import tests.const import tests.fixtures.example_project import tests.util from tests.const import ( EXAMPLE_PROJECT_NAME, EXAMPLE_PROJECT_VERSION, EXAMPLE_PYPROJECT_TOML_CONTENT, EXAMPLE_RELEASE_NOTES_TEMPLATE, ) from tests.util import copy_dir_tree, temporary_working_directory if TYPE_CHECKING: from typing import Any, Protocol, Sequence from tests.conftest import ( BuildRepoOrCopyCacheFn, GetMd5ForSetOfFilesFn, ) from tests.fixtures.example_project import ( UpdatePyprojectTomlFn, UpdateVersionPyFileFn, ) from tests.fixtures.git_repo import RepoActions # class GetWheelFileFn(Protocol): # def __call__(self, version_str: str) -> Path: ... class UpdatePkgPyprojectTomlFn(Protocol): def __call__(self, pkg_name: str, setting: str, value: Any) -> None: ... class UseCommonReleaseNotesTemplateFn(Protocol): def __call__(self) -> None: ... 
@pytest.fixture(scope="session") def deps_files_4_example_monorepo() -> list[Path]: return [ # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), Path(tests.fixtures.example_project.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_example_monorepo( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_example_monorepo: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files(deps_files_4_example_monorepo) @pytest.fixture(scope="session") def cached_example_monorepo( build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, monorepo_pkg1_dir: Path, monorepo_pkg2_dir: Path, monorepo_pkg1_version_py_file: Path, monorepo_pkg2_version_py_file: Path, monorepo_pkg1_pyproject_toml_file: Path, monorepo_pkg2_pyproject_toml_file: Path, build_spec_hash_4_example_monorepo: str, update_version_py_file: UpdateVersionPyFileFn, update_pyproject_toml: UpdatePyprojectTomlFn, ) -> Path: """ Initializes the example monorepo project. DO NOT USE DIRECTLY Use the `init_example_monorepo` fixture instead. 
""" def _build_project(cached_project_path: Path) -> Sequence[RepoActions]: # purposefully a relative path # example_dir = version_py_file.parent gitignore_contents = dedent( f""" *.pyc /{monorepo_pkg1_version_py_file} /{monorepo_pkg2_version_py_file} dist/ """ ).lstrip() init_py_contents = dedent( ''' """An example package with a very informative docstring.""" from ._version import __version__ def hello_world() -> None: print("{pkg_name} Hello World") ''' ).lstrip() with temporary_working_directory(cached_project_path): update_version_py_file( version=EXAMPLE_PROJECT_VERSION, version_file=monorepo_pkg1_version_py_file, ) update_version_py_file( version=EXAMPLE_PROJECT_VERSION, version_file=monorepo_pkg2_version_py_file, ) file_2_contents: list[tuple[str | Path, str]] = [ ( monorepo_pkg1_version_py_file.parent / "__init__.py", init_py_contents.format(pkg_name="Pkg 1:"), ), ( monorepo_pkg2_version_py_file.parent / "__init__.py", init_py_contents.format(pkg_name="Pkg 2:"), ), (".gitignore", gitignore_contents), (monorepo_pkg1_pyproject_toml_file, EXAMPLE_PYPROJECT_TOML_CONTENT), (monorepo_pkg2_pyproject_toml_file, EXAMPLE_PYPROJECT_TOML_CONTENT), ] for file, contents in file_2_contents: abs_filepath = cached_project_path.joinpath(file).resolve() # make sure the parent directory exists abs_filepath.parent.mkdir(parents=True, exist_ok=True) # write file contents abs_filepath.write_text(contents) config_updates: list[tuple[str, Any, Path]] = [ ( "tool.poetry.name", "pkg-1", cached_project_path / monorepo_pkg1_pyproject_toml_file, ), ( "tool.poetry.name", "pkg-2", cached_project_path / monorepo_pkg2_pyproject_toml_file, ), ( "tool.semantic_release.version_variables", [ f"{monorepo_pkg1_version_py_file.relative_to(monorepo_pkg1_dir)}:__version__" ], cached_project_path / monorepo_pkg1_pyproject_toml_file, ), ( "tool.semantic_release.version_variables", [ f"{monorepo_pkg2_version_py_file.relative_to(monorepo_pkg2_dir)}:__version__" ], cached_project_path / 
monorepo_pkg2_pyproject_toml_file, ), ] for setting, value, toml_file in config_updates: update_pyproject_toml( setting=setting, value=value, toml_file=toml_file, ) # This is a special build, we don't expose the Repo Actions to the caller return [] # End of _build_project() return build_repo_or_copy_cache( repo_name="example_monorepo", build_spec_hash=build_spec_hash_4_example_monorepo, build_repo_func=_build_project, ) @pytest.fixture def init_example_monorepo( example_project_dir: tests.fixtures.example_project.ExProjectDir, cached_example_monorepo: Path, change_to_ex_proj_dir: None, ) -> None: """This fixture initializes the example project in the current test's project directory.""" if not cached_example_monorepo.exists(): raise RuntimeError( f"Unable to find cached project files for {EXAMPLE_PROJECT_NAME}" ) # Copy the cached project files into the current test's project directory copy_dir_tree(cached_example_monorepo, example_project_dir) @pytest.fixture def monorepo_project_w_common_release_notes_template( init_example_monorepo: None, monorepo_use_common_release_notes_template: UseCommonReleaseNotesTemplateFn, ) -> None: monorepo_use_common_release_notes_template() @pytest.fixture(scope="session") def monorepo_pkg1_name() -> str: return "pkg1" @pytest.fixture(scope="session") def monorepo_pkg2_name() -> str: return "pkg2" @pytest.fixture(scope="session") def monorepo_pkg_dir_pattern() -> str: return str(Path("packages", "{package_name}")) @pytest.fixture(scope="session") def monorepo_pkg1_dir( monorepo_pkg1_name: str, monorepo_pkg_dir_pattern: str, ) -> str: return monorepo_pkg_dir_pattern.format(package_name=monorepo_pkg1_name) @pytest.fixture(scope="session") def monorepo_pkg2_dir( monorepo_pkg2_name: str, monorepo_pkg_dir_pattern: str, ) -> str: return monorepo_pkg_dir_pattern.format(package_name=monorepo_pkg2_name) @pytest.fixture(scope="session") def monorepo_pkg_version_py_file_pattern(monorepo_pkg_dir_pattern: str) -> str: return 
str(Path(monorepo_pkg_dir_pattern, "src", "{package_name}", "_version.py")) @pytest.fixture(scope="session") def monorepo_pkg1_version_py_file( monorepo_pkg1_name: str, monorepo_pkg_version_py_file_pattern: str, ) -> Path: return Path( monorepo_pkg_version_py_file_pattern.format(package_name=monorepo_pkg1_name) ) @pytest.fixture(scope="session") def monorepo_pkg2_version_py_file( monorepo_pkg2_name: str, monorepo_pkg_version_py_file_pattern: str, ) -> Path: return Path( monorepo_pkg_version_py_file_pattern.format(package_name=monorepo_pkg2_name) ) @pytest.fixture(scope="session") def monorepo_pkg_pyproject_toml_file_pattern( monorepo_pkg_dir_pattern: str, pyproject_toml_file: str, ) -> str: return str(Path(monorepo_pkg_dir_pattern, pyproject_toml_file)) @pytest.fixture(scope="session") def monorepo_pkg1_pyproject_toml_file( monorepo_pkg1_name: str, monorepo_pkg_pyproject_toml_file_pattern: str, ) -> Path: return Path( monorepo_pkg_pyproject_toml_file_pattern.format(package_name=monorepo_pkg1_name) ) @pytest.fixture(scope="session") def monorepo_pkg2_pyproject_toml_file( monorepo_pkg2_name: str, monorepo_pkg_pyproject_toml_file_pattern: str, ) -> Path: return Path( monorepo_pkg_pyproject_toml_file_pattern.format(package_name=monorepo_pkg2_name) ) @pytest.fixture(scope="session") def monorepo_pkg_dist_dir_pattern(monorepo_pkg_dir_pattern: str) -> str: return str(Path(monorepo_pkg_dir_pattern, "dist")) @pytest.fixture(scope="session") def monorepo_pkg1_dist_dir( monorepo_pkg1_name: str, monorepo_pkg_dist_dir_pattern: str, ) -> Path: return Path(monorepo_pkg_dist_dir_pattern.format(package_name=monorepo_pkg1_name)) @pytest.fixture(scope="session") def monorepo_pkg2_dist_dir( monorepo_pkg2_name: str, monorepo_pkg_dist_dir_pattern: str, ) -> Path: return Path(monorepo_pkg_dist_dir_pattern.format(package_name=monorepo_pkg2_name)) @pytest.fixture(scope="session") def monorepo_pkg_changelog_md_file_pattern(monorepo_pkg_dir_pattern: str) -> str: return 
str(Path(monorepo_pkg_dir_pattern, "CHANGELOG.md")) @pytest.fixture(scope="session") def monorepo_pkg1_changelog_md_file( monorepo_pkg1_name: str, monorepo_pkg_changelog_md_file_pattern: str, ) -> Path: return Path( monorepo_pkg_changelog_md_file_pattern.format(package_name=monorepo_pkg1_name) ) @pytest.fixture(scope="session") def monorepo_pkg2_changelog_md_file( monorepo_pkg2_name: str, monorepo_pkg_changelog_md_file_pattern: str, ) -> Path: return Path( monorepo_pkg_changelog_md_file_pattern.format(package_name=monorepo_pkg2_name) ) @pytest.fixture(scope="session") def monorepo_pkg_changelog_rst_file_pattern(monorepo_pkg_dir_pattern: str) -> str: return str(Path(monorepo_pkg_dir_pattern, "CHANGELOG.rst")) @pytest.fixture(scope="session") def monorepo_pkg1_changelog_rst_file( monorepo_pkg1_name: str, monorepo_pkg_changelog_rst_file_pattern: str, ) -> Path: return Path( monorepo_pkg_changelog_rst_file_pattern.format(package_name=monorepo_pkg1_name) ) @pytest.fixture(scope="session") def monorepo_pkg2_changelog_rst_file( monorepo_pkg2_name: str, monorepo_pkg_changelog_rst_file_pattern: str, ) -> Path: return Path( monorepo_pkg_changelog_rst_file_pattern.format(package_name=monorepo_pkg2_name) ) # @pytest.fixture(scope="session") # def get_wheel_file(dist_dir: Path) -> GetWheelFileFn: # def _get_wheel_file(version_str: str) -> Path: # return dist_dir / f"{EXAMPLE_PROJECT_NAME}-{version_str}-py3-none-any.whl" # return _get_wheel_file @pytest.fixture def example_monorepo_pkg_dir_pattern( tmp_path: Path, monorepo_pkg_dir_pattern: Path, ) -> str: return str(tmp_path.resolve() / monorepo_pkg_dir_pattern) @pytest.fixture def example_monorepo_pkg1_dir( monorepo_pkg1_name: str, example_monorepo_pkg_dir_pattern: str, ) -> Path: return Path( example_monorepo_pkg_dir_pattern.format(package_name=monorepo_pkg1_name) ) @pytest.fixture def example_monorepo_pkg2_dir( monorepo_pkg2_name: str, example_monorepo_pkg_dir_pattern: str, ) -> Path: return Path( 
example_monorepo_pkg_dir_pattern.format(package_name=monorepo_pkg2_name) ) @pytest.fixture def monorepo_use_common_release_notes_template( example_project_template_dir: Path, changelog_template_dir: Path, update_pyproject_toml: UpdatePyprojectTomlFn, monorepo_pkg1_pyproject_toml_file: Path, monorepo_pkg2_pyproject_toml_file: Path, ) -> UseCommonReleaseNotesTemplateFn: config_setting_template_dir = "tool.semantic_release.changelog.template_dir" def _use_release_notes_template() -> None: update_pyproject_toml( setting=config_setting_template_dir, value=str( Path( *( "../" for _ in list(Path(monorepo_pkg1_pyproject_toml_file).parents)[ :-1 ] ), changelog_template_dir, ) ), toml_file=monorepo_pkg1_pyproject_toml_file, ) update_pyproject_toml( setting=config_setting_template_dir, value=str( Path( *( "../" for _ in list(Path(monorepo_pkg2_pyproject_toml_file).parents)[ :-1 ] ), changelog_template_dir, ) ), toml_file=monorepo_pkg2_pyproject_toml_file, ) example_project_template_dir.mkdir(parents=True, exist_ok=True) release_notes_j2 = example_project_template_dir / ".release_notes.md.j2" release_notes_j2.write_text(EXAMPLE_RELEASE_NOTES_TEMPLATE) return _use_release_notes_template # @pytest.fixture # def example_pyproject_toml( # example_project_dir: ExProjectDir, # pyproject_toml_file: Path, # ) -> Path: # return example_project_dir / pyproject_toml_file # @pytest.fixture # def example_dist_dir( # example_project_dir: ExProjectDir, # dist_dir: Path, # ) -> Path: # return example_project_dir / dist_dir # @pytest.fixture # def example_project_wheel_file( # example_dist_dir: Path, # get_wheel_file: GetWheelFileFn, # ) -> Path: # return example_dist_dir / get_wheel_file(EXAMPLE_PROJECT_VERSION) # Note this is just the path and the content may change # @pytest.fixture # def example_changelog_md( # example_project_dir: ExProjectDir, # changelog_md_file: Path, # ) -> Path: # return example_project_dir / changelog_md_file # Note this is just the path and the content may change # 
@pytest.fixture # def example_changelog_rst( # example_project_dir: ExProjectDir, # changelog_rst_file: Path, # ) -> Path: # return example_project_dir / changelog_rst_file # @pytest.fixture # def example_project_template_dir( # example_project_dir: ExProjectDir, # changelog_template_dir: Path, # ) -> Path: # return example_project_dir / changelog_template_dir @pytest.fixture(scope="session") def update_pkg_pyproject_toml( update_pyproject_toml: UpdatePyprojectTomlFn, monorepo_pkg_pyproject_toml_file_pattern: str, ) -> UpdatePkgPyprojectTomlFn: """Update the pyproject.toml file with the given content.""" def _update_pyproject_toml(pkg_name: str, setting: str, value: Any) -> None: toml_file = Path( monorepo_pkg_pyproject_toml_file_pattern.format(package_name=pkg_name) ).resolve() if not toml_file.exists(): raise FileNotFoundError( f"pyproject.toml file for package {pkg_name} not found at {toml_file}" ) update_pyproject_toml( setting=setting, value=value, toml_file=toml_file, ) return _update_pyproject_toml python-semantic-release-10.4.1/tests/fixtures/monorepos/git_monorepo.py000066400000000000000000000156041506116242600264550ustar00rootroot00000000000000from __future__ import annotations from pathlib import Path from shutil import rmtree from typing import TYPE_CHECKING import pytest from git import Repo import tests.conftest import tests.const import tests.fixtures.git_repo import tests.util from tests.const import ( DEFAULT_BRANCH_NAME, EXAMPLE_HVCS_DOMAIN, EXAMPLE_PROJECT_NAME, ) from tests.util import copy_dir_tree if TYPE_CHECKING: from typing import Protocol, Sequence from git import Actor from semantic_release.hvcs import HvcsBase from tests.conftest import ( BuildRepoOrCopyCacheFn, GetMd5ForSetOfFilesFn, RepoActions, ) from tests.fixtures.git_repo import ( BuildRepoFn, CommitConvention, TomlSerializableTypes, ) class BuildMonorepoFn(Protocol): def __call__(self, dest_dir: Path | str) -> Path: ... 
@pytest.fixture(scope="session") def deps_files_4_example_git_monorepo( deps_files_4_example_monorepo: list[Path], ) -> list[Path]: return [ *deps_files_4_example_monorepo, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), Path(tests.fixtures.git_repo.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_example_git_monorepo( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_example_git_monorepo: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files(deps_files_4_example_git_monorepo) @pytest.fixture(scope="session") def cached_example_git_monorepo( build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_example_git_monorepo: str, cached_example_monorepo: Path, example_git_https_url: str, commit_author: Actor, ) -> Path: """ Initializes an example monorepo project with git. DO NOT USE DIRECTLY. Use a `repo_*` fixture instead. This creates a default base repository, all settings can be changed later through from the example_project_git_repo fixture's return object and manual adjustment. """ def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: if not cached_example_monorepo.exists(): raise RuntimeError("Unable to find cached monorepo files") # make a copy of the example monorepo as a base copy_dir_tree(cached_example_monorepo, cached_repo_path) # initialize git repo (open and close) # NOTE: We don't want to hold the repo object open for the entire test session, # the implementation on Windows holds some file descriptors open until close is called. 
with Repo.init(cached_repo_path) as repo: rmtree(str(Path(repo.git_dir, "hooks"))) # Without this the global config may set it to "master", we want consistency repo.git.branch("-M", DEFAULT_BRANCH_NAME) with repo.config_writer("repository") as config: config.set_value("user", "name", commit_author.name) config.set_value("user", "email", commit_author.email) config.set_value("commit", "gpgsign", False) config.set_value("tag", "gpgsign", False) repo.create_remote(name="origin", url=example_git_https_url) # make sure all base files are in index to enable initial commit repo.index.add(("*", ".gitignore")) # This is a special build, we don't expose the Repo Actions to the caller return [] # End of _build_repo() return build_repo_or_copy_cache( repo_name=cached_example_git_monorepo.__name__.split("_", maxsplit=1)[1], build_spec_hash=build_spec_hash_4_example_git_monorepo, build_repo_func=_build_repo, ) @pytest.fixture(scope="session") def file_in_pkg_pattern(file_in_repo: str, monorepo_pkg_dir_pattern: str) -> str: return str(Path(monorepo_pkg_dir_pattern) / file_in_repo) @pytest.fixture(scope="session") def file_in_monorepo_pkg1( monorepo_pkg1_name: str, file_in_pkg_pattern: str, ) -> Path: return Path(file_in_pkg_pattern.format(pkg_name=monorepo_pkg1_name)) @pytest.fixture(scope="session") def file_in_monorepo_pkg2( monorepo_pkg2_name: str, file_in_pkg_pattern: str, ) -> Path: return Path(file_in_pkg_pattern.format(pkg_name=monorepo_pkg2_name)) @pytest.fixture(scope="session") def build_base_monorepo( # noqa: C901 cached_example_git_monorepo: Path, ) -> BuildMonorepoFn: """ This fixture is intended to simplify repo scenario building by initially creating the repo but also configuring semantic_release in the pyproject.toml for when the test executes semantic_release. It returns a function so that derivative fixtures can call this fixture with individual parameters. 
""" def _build_configured_base_monorepo(dest_dir: Path | str) -> Path: if not cached_example_git_monorepo.exists(): raise RuntimeError("Unable to find cached git project files!") # Copy the cached git project the dest directory copy_dir_tree(cached_example_git_monorepo, dest_dir) return Path(dest_dir) return _build_configured_base_monorepo @pytest.fixture(scope="session") def configure_monorepo_package( # noqa: C901 configure_base_repo: BuildRepoFn, ) -> BuildRepoFn: """ This fixture is intended to simplify repo scenario building by initially creating the repo but also configuring semantic_release in the pyproject.toml for when the test executes semantic_release. It returns a function so that derivative fixtures can call this fixture with individual parameters. """ def _configure( # noqa: C901 dest_dir: Path | str, commit_type: CommitConvention = "conventional", hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = True, # Default as of v10 package_name: str = EXAMPLE_PROJECT_NAME, monorepo: bool = True, ) -> tuple[Path, HvcsBase]: if not monorepo: raise ValueError("This fixture is only for monorepo packages!") if not Path(dest_dir).exists(): raise RuntimeError(f"Destination directory {dest_dir} does not exist!") return configure_base_repo( dest_dir=dest_dir, commit_type=commit_type, hvcs_client_name=hvcs_client_name, hvcs_domain=hvcs_domain, tag_format_str=tag_format_str, extra_configs=extra_configs, mask_initial_release=mask_initial_release, package_name=package_name, monorepo=monorepo, ) return _configure python-semantic-release-10.4.1/tests/fixtures/monorepos/github_flow/000077500000000000000000000000001506116242600257055ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/fixtures/monorepos/github_flow/__init__.py000066400000000000000000000002351506116242600300160ustar00rootroot00000000000000from 
tests.fixtures.monorepos.github_flow.monorepo_w_default_release import * from tests.fixtures.monorepos.github_flow.monorepo_w_release_channels import * python-semantic-release-10.4.1/tests/fixtures/monorepos/github_flow/monorepo_w_default_release.py000066400000000000000000001200221506116242600336440ustar00rootroot00000000000000from __future__ import annotations from datetime import timedelta from itertools import count from pathlib import Path from textwrap import dedent from typing import TYPE_CHECKING, cast import pytest from semantic_release.cli.config import ChangelogOutputFormat from semantic_release.commit_parser.conventional.options_monorepo import ( ConventionalCommitMonorepoParserOptions, ) from semantic_release.commit_parser.conventional.parser_monorepo import ( ConventionalCommitMonorepoParser, ) from semantic_release.version.version import Version import tests.conftest import tests.const import tests.util from tests.const import ( DEFAULT_BRANCH_NAME, EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Sequence from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.token import ParseResult from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, CommitSpec, ConvertCommitSpecsToCommitDefsFn, ConvertCommitSpecToCommitDefFn, ExProjectGitRepoFn, FormatGitHubSquashCommitMsgFn, GetRepoDefinitionFn, RepoActionChangeDirectory, RepoActions, RepoActionWriteChangelogsDestFile, TomlSerializableTypes, ) @pytest.fixture(scope="session") def deps_files_4_github_flow_monorepo_w_default_release_channel( deps_files_4_example_git_monorepo: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_monorepo, # This file 
Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_github_flow_monorepo_w_default_release_channel( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_github_flow_monorepo_w_default_release_channel: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files( deps_files_4_github_flow_monorepo_w_default_release_channel ) @pytest.fixture(scope="session") def get_repo_definition_4_github_flow_monorepo_w_default_release_channel( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, convert_commit_spec_to_commit_def: ConvertCommitSpecToCommitDefFn, format_squash_commit_msg_github: FormatGitHubSquashCommitMsgFn, monorepo_pkg1_changelog_md_file: Path, monorepo_pkg1_changelog_rst_file: Path, monorepo_pkg2_changelog_md_file: Path, monorepo_pkg2_changelog_rst_file: Path, monorepo_pkg1_name: str, monorepo_pkg2_name: str, monorepo_pkg1_dir: Path, monorepo_pkg2_dir: Path, monorepo_pkg1_version_py_file: Path, monorepo_pkg2_version_py_file: Path, monorepo_pkg1_pyproject_toml_file: Path, monorepo_pkg2_pyproject_toml_file: Path, stable_now_date: GetStableDateNowFn, default_tag_format_str: str, ) -> GetRepoDefinitionFn: """ Builds a Monorepo with the GitHub Flow branching strategy and a squash commit merging strategy for a single release channel on the default branch. Implementation: - The monorepo contains two packages, each with its own internal changelog but shared template. 
- The repository implements the following git graph: ``` * chore(release): pkg1@1.1.0 [skip ci] (tag: pkg1-v1.1.0, branch: main, HEAD -> main) * feat(pkg1): file modified outside of pkg 1, identified by scope (#5) | | * feat(pkg1): file modified outside of pkg 1, identified by scope (branch: pkg1/feat/pr-4) |/ * chore(release): pkg2@1.1.1 [skip ci] (tag: pkg2-v1.1.1) * fix(pkg2-cli): file modified outside of pkg 2, identified by scope (#4) | | * fix(pkg2-cli): file modified outside of pkg 2, identified by scope (branch: pkg2/fix/pr-3) |/ * chore(release): pkg2@1.1.0 [skip ci] (tag: pkg2-v1.1.0) * feat: no pkg scope but file in pkg 2 directory (#3) # Squash merge of pkg2/feat/pr-2 * chore(release): pkg1@1.0.1 [skip ci] (tag: pkg1-v1.0.1) * fix: no pkg scope but file in pkg 1 directory (#2) # Squash merge of pkg1/fix/pr-1 | | * docs(cli): add cli documentation | * test(cli): add cli tests | * feat: no pkg scope but file in pkg 2 directory (branch: pkg2/feat/pr-2) |/ | * fix: no pkg scope but file in pkg 1 directory (branch: pkg1/fix/pr-1) |/ * chore(release): pkg2@1.0.0 [skip ci] (tag: pkg2-v1.0.0) # Initial release of pkg 2 * chore(release): pkg1@1.0.0 [skip ci] (tag: pkg1-v1.0.0) # Initial release of pkg 1 * Initial commit # Includes core functionality for both packages ``` """ def _get_repo_from_definition( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = default_tag_format_str, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = True, ignore_merge_commits: bool = True, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) pr_num_gen = (i for i in count(start=2, step=1)) pkg1_changelog_file_definitions: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": monorepo_pkg1_changelog_md_file, "format": 
ChangelogOutputFormat.MARKDOWN, "mask_initial_release": True, }, { "path": monorepo_pkg1_changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, "mask_initial_release": True, }, ] pkg2_changelog_file_definitions: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": monorepo_pkg2_changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, "mask_initial_release": True, }, { "path": monorepo_pkg2_changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, "mask_initial_release": True, }, ] change_to_pkg1_dir: RepoActionChangeDirectory = { "action": RepoActionStep.CHANGE_DIRECTORY, "details": { "directory": monorepo_pkg1_dir, }, } change_to_pkg2_dir: RepoActionChangeDirectory = { "action": RepoActionStep.CHANGE_DIRECTORY, "details": { "directory": monorepo_pkg2_dir, }, } change_to_example_project_dir: RepoActionChangeDirectory = { "action": RepoActionStep.CHANGE_DIRECTORY, "details": { "directory": "/", }, } if commit_type != "conventional": raise ValueError(f"Unsupported commit type: {commit_type}") pkg1_commit_parser = ConventionalCommitMonorepoParser( options=ConventionalCommitMonorepoParserOptions( parse_squash_commits=True, ignore_merge_commits=ignore_merge_commits, scope_prefix=f"{monorepo_pkg1_name}-?", path_filters=(".",), ) ) pkg2_commit_parser = ConventionalCommitMonorepoParser( options=ConventionalCommitMonorepoParserOptions( parse_squash_commits=pkg1_commit_parser.options.parse_squash_commits, ignore_merge_commits=pkg1_commit_parser.options.ignore_merge_commits, scope_prefix=f"{monorepo_pkg2_name}-?", path_filters=(".",), ) ) common_configs: dict[str, TomlSerializableTypes] = { # Set the default release branch "tool.semantic_release.branches.main": { "match": r"^(main|master)$", "prerelease": False, }, "tool.semantic_release.allow_zero_version": False, "tool.semantic_release.changelog.exclude_commit_patterns": [r"^chore"], "tool.semantic_release.commit_parser": f"{commit_type}-monorepo", 
"tool.semantic_release.commit_parser_options.parse_squash_commits": pkg1_commit_parser.options.parse_squash_commits, "tool.semantic_release.commit_parser_options.ignore_merge_commits": pkg1_commit_parser.options.ignore_merge_commits, } mr1_pkg1_fix_branch_name = f"{monorepo_pkg1_name}/fix/pr-1" mr2_pkg2_feat_branch_name = f"{monorepo_pkg2_name}/feat/pr-2" mr3_pkg2_fix_branch_name = f"{monorepo_pkg2_name}/fix/pr-3" mr4_pkg1_feat_branch_name = f"{monorepo_pkg1_name}/feat/pr-4" pkg1_new_version = Version.parse( "1.0.0", tag_format=f"{monorepo_pkg1_name}-{tag_format_str}" ) pkg2_new_version = Version.parse( "1.0.0", tag_format=f"{monorepo_pkg2_name}-{tag_format_str}" ) repo_construction_steps: list[RepoActions] = [ { "action": RepoActionStep.CREATE_MONOREPO, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "post_actions": [ { "action": RepoActionStep.CONFIGURE_MONOREPO, "details": { "package_dir": monorepo_pkg1_dir, "package_name": monorepo_pkg1_name, "tag_format_str": pkg1_new_version.tag_format, "mask_initial_release": mask_initial_release, "extra_configs": { **common_configs, "tool.semantic_release.commit_message": ( pkg1_cmt_msg_format := dedent( f"""\ chore(release): {monorepo_pkg1_name}@{{version}} [skip ci] Automatically generated by python-semantic-release """ ) ), "tool.semantic_release.commit_parser_options.scope_prefix": pkg1_commit_parser.options.scope_prefix, "tool.semantic_release.commit_parser_options.path_filters": pkg1_commit_parser.options.path_filters, **(extra_configs or {}), }, }, }, { "action": RepoActionStep.CONFIGURE_MONOREPO, "details": { "package_dir": monorepo_pkg2_dir, "package_name": monorepo_pkg2_name, "tag_format_str": pkg2_new_version.tag_format, "mask_initial_release": mask_initial_release, "extra_configs": { **common_configs, "tool.semantic_release.commit_message": ( pkg2_cmt_msg_format := dedent( f"""\ chore(release): {monorepo_pkg2_name}@{{version}} [skip ci] Automatically 
generated by python-semantic-release """ ) ), "tool.semantic_release.commit_parser_options.scope_prefix": pkg2_commit_parser.options.scope_prefix, "tool.semantic_release.commit_parser_options.path_filters": pkg2_commit_parser.options.path_filters, **(extra_configs or {}), }, }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_c1_initial := "c1_initial_commit" ), "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, ], commit_type, # this parser does not matter since the commit is common parser=cast( "CommitParser[ParseResult, ParserOptions]", pkg1_commit_parser, ), monorepo=True, ), }, }, ], }, } ] repo_construction_steps.extend( [ { "action": RepoActionStep.RELEASE, "details": { "version": str(pkg1_new_version), "datetime": next(commit_timestamp_gen), "tag_format": pkg1_new_version.tag_format, "version_py_file": monorepo_pkg1_version_py_file.relative_to( monorepo_pkg1_dir ), "commit_message_format": pkg1_cmt_msg_format, "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": pkg1_new_version, "dest_files": pkg1_changelog_file_definitions, "commit_ids": [cid_c1_initial], }, }, change_to_pkg1_dir, ], "post_actions": [change_to_example_project_dir], }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(pkg2_new_version), "datetime": next(commit_timestamp_gen), "tag_format": pkg2_new_version.tag_format, "version_py_file": monorepo_pkg2_version_py_file.relative_to( monorepo_pkg2_dir ), "commit_message_format": pkg2_cmt_msg_format, "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": pkg2_new_version, "dest_files": pkg2_changelog_file_definitions, "commit_ids": [cid_c1_initial], }, }, change_to_pkg2_dir, ], "post_actions": [change_to_example_project_dir], }, }, ] ) 
pkg1_fix_branch_commits: Sequence[CommitSpec] = [ { "cid": "pkg1-fix-1-squashed", "conventional": "fix: no pkg scope but file in pkg 1 directory\n\nResolves: #123\n", "emoji": ":bug: no pkg scope but file in pkg 1 directory\n\nResolves: #123\n", "scipy": "MAINT: no pkg scope but file in pkg 1 directory\n\nResolves: #123\n", "datetime": next(commit_timestamp_gen), }, ] repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": mr1_pkg1_fix_branch_name, "start_branch": DEFAULT_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "pre_actions": [change_to_pkg1_dir], "commits": convert_commit_specs_to_commit_defs( [ { **commit, "include_in_changelog": False, } for commit in pkg1_fix_branch_commits ], commit_type, parser=cast( "CommitParser[ParseResult, ParserOptions]", pkg1_commit_parser, ), monorepo=True, ), "post_actions": [change_to_example_project_dir], }, }, ] ) # simulate separate work by another person at same time as the fix branch pkg2_feat_branch_commits: Sequence[CommitSpec] = [ { "cid": "pkg2-feat-1-squashed", "conventional": "feat: no pkg scope but file in pkg 2 directory", "emoji": ":sparkles: no pkg scope but file in pkg 2 directory", "scipy": "ENH: no pkg scope but file in pkg 2 directory", "datetime": next(commit_timestamp_gen), }, { "cid": "pkg2-feat-2-squashed", "conventional": "test(cli): add cli tests", "emoji": ":checkmark: add cli tests", "scipy": "TST: add cli tests", "datetime": next(commit_timestamp_gen), }, { "cid": "pkg2-feat-3-squashed", "conventional": "docs(cli): add cli documentation", "emoji": ":memo: add cli documentation", "scipy": "DOC: add cli documentation", "datetime": next(commit_timestamp_gen), }, ] repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": mr2_pkg2_feat_branch_name, "start_branch": DEFAULT_BRANCH_NAME, }, }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "pre_actions": 
[change_to_pkg2_dir], "commits": convert_commit_specs_to_commit_defs( [ { **commit, "include_in_changelog": False, } for commit in pkg2_feat_branch_commits ], commit_type, parser=cast( "CommitParser[ParseResult, ParserOptions]", pkg2_commit_parser, ), monorepo=True, ), "post_actions": [change_to_example_project_dir], }, }, ] ) pkg1_new_version = Version.parse( "1.0.1", tag_format=pkg1_new_version.tag_format ) all_commit_types: list[CommitConvention] = ["conventional", "emoji", "scipy"] fix_branch_pr_number = next(pr_num_gen) fix_branch_squash_commit_spec: CommitSpec = { "cid": "mr1-pkg1-fix", **{ # type: ignore[typeddict-item] cmt_type: format_squash_commit_msg_github( # Use the primary commit message as the PR title pr_title=pkg1_fix_branch_commits[0][cmt_type], pr_number=fix_branch_pr_number, squashed_commits=[ cmt[commit_type] for cmt in pkg1_fix_branch_commits[1:] ], ) for cmt_type in all_commit_types }, "datetime": next(commit_timestamp_gen), "include_in_changelog": True, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_SQUASH, "details": { "branch": mr1_pkg1_fix_branch_name, "strategy_option": "theirs", "commit_def": convert_commit_spec_to_commit_def( fix_branch_squash_commit_spec, commit_type, parser=cast( "CommitParser[ParseResult, ParserOptions]", pkg1_commit_parser, ), monorepo=True, ), "config_file": monorepo_pkg1_pyproject_toml_file, }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(pkg1_new_version), "datetime": next(commit_timestamp_gen), "tag_format": pkg1_new_version.tag_format, "version_py_file": monorepo_pkg1_version_py_file.relative_to( monorepo_pkg1_dir ), "commit_message_format": pkg1_cmt_msg_format, "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": pkg1_new_version, "dest_files": pkg1_changelog_file_definitions, "commit_ids": [ f'{fix_branch_squash_commit_spec["cid"]}-{index + 1}' 
for index in range(len(pkg1_fix_branch_commits)) ], }, }, change_to_pkg1_dir, ], "post_actions": [change_to_example_project_dir], }, }, ] ) feat_branch_pr_number = next(pr_num_gen) feat_branch_squash_commit_spec: CommitSpec = { "cid": "mr2-pkg2-feat", **{ # type: ignore[typeddict-item] cmt_type: format_squash_commit_msg_github( # Use the primary commit message as the PR title pr_title=pkg2_feat_branch_commits[0][cmt_type], pr_number=feat_branch_pr_number, squashed_commits=[ cmt[commit_type] for cmt in pkg2_feat_branch_commits[1:] ], ) for cmt_type in all_commit_types }, "datetime": next(commit_timestamp_gen), "include_in_changelog": True, } pkg2_new_version = Version.parse( "1.1.0", tag_format=pkg2_new_version.tag_format ) repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_SQUASH, "details": { "branch": mr2_pkg2_feat_branch_name, "strategy_option": "theirs", "commit_def": convert_commit_spec_to_commit_def( feat_branch_squash_commit_spec, commit_type, parser=cast( "CommitParser[ParseResult, ParserOptions]", pkg2_commit_parser, ), monorepo=True, ), "config_file": monorepo_pkg2_pyproject_toml_file, }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(pkg2_new_version), "datetime": next(commit_timestamp_gen), "tag_format": pkg2_new_version.tag_format, "version_py_file": monorepo_pkg2_version_py_file.relative_to( monorepo_pkg2_dir ), "commit_message_format": pkg2_cmt_msg_format, "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": pkg2_new_version, "dest_files": pkg2_changelog_file_definitions, "commit_ids": [ f'{feat_branch_squash_commit_spec["cid"]}-{index + 1}' for index in range( len(pkg2_feat_branch_commits) ) ], }, }, change_to_pkg2_dir, ], "post_actions": [change_to_example_project_dir], }, }, ] ) pkg2_fix_branch_commits: Sequence[CommitSpec] = [ { "cid": "pkg2-fix-1-squashed", "conventional": "fix(pkg2-cli): file modified outside of pkg 2, identified by scope\n\nResolves: #123\n", "emoji": 
":bug: (pkg2-cli) file modified outside of pkg 2, identified by scope\n\nResolves: #123\n", "scipy": "MAINT:pkg2-cli: file modified outside of pkg 2, identified by scope\n\nResolves: #123\n", "datetime": next(commit_timestamp_gen), }, ] repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": mr3_pkg2_fix_branch_name, "start_branch": DEFAULT_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { **commit, "include_in_changelog": False, } for commit in pkg2_fix_branch_commits ], commit_type, parser=cast( "CommitParser[ParseResult, ParserOptions]", pkg2_commit_parser, ), monorepo=True, ), }, }, ] ) pkg2_new_version = Version.parse( "1.1.1", tag_format=pkg2_new_version.tag_format ) fix_branch_pr_number = next(pr_num_gen) fix_branch_squash_commit_spec = { "cid": "mr3-pkg2-fix", **{ # type: ignore[typeddict-item] cmt_type: format_squash_commit_msg_github( # Use the primary commit message as the PR title pr_title=pkg2_fix_branch_commits[0][cmt_type], pr_number=fix_branch_pr_number, squashed_commits=[ cmt[commit_type] for cmt in pkg2_fix_branch_commits[1:] ], ) for cmt_type in all_commit_types }, "datetime": next(commit_timestamp_gen), "include_in_changelog": True, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_SQUASH, "details": { "branch": mr3_pkg2_fix_branch_name, "strategy_option": "theirs", "commit_def": convert_commit_spec_to_commit_def( fix_branch_squash_commit_spec, commit_type, parser=cast( "CommitParser[ParseResult, ParserOptions]", pkg2_commit_parser, ), monorepo=True, ), "config_file": monorepo_pkg2_pyproject_toml_file, }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(pkg2_new_version), "datetime": next(commit_timestamp_gen), "tag_format": pkg2_new_version.tag_format, "version_py_file": 
monorepo_pkg2_version_py_file.relative_to( monorepo_pkg2_dir ), "commit_message_format": pkg2_cmt_msg_format, "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": pkg2_new_version, "dest_files": pkg2_changelog_file_definitions, "commit_ids": [ f'{fix_branch_squash_commit_spec["cid"]}-{index + 1}' for index in range(len(pkg2_fix_branch_commits)) ], }, }, change_to_pkg2_dir, ], "post_actions": [change_to_example_project_dir], }, }, ] ) pkg1_feat_branch_commits: Sequence[CommitSpec] = [ { "cid": "pkg1-feat-1-squashed", "conventional": "feat(pkg1): file modified outside of pkg 1, identified by scope", "emoji": ":sparkles: (pkg1) file modified outside of pkg 1, identified by scope", "scipy": "ENH:pkg1: file modified outside of pkg 1, identified by scope", "datetime": next(commit_timestamp_gen), } ] repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": mr4_pkg1_feat_branch_name, "start_branch": DEFAULT_BRANCH_NAME, }, }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { **commit, "include_in_changelog": False, } for commit in pkg1_feat_branch_commits ], commit_type, parser=cast( "CommitParser[ParseResult, ParserOptions]", pkg1_commit_parser, ), monorepo=True, ), }, }, ] ) feat_branch_pr_number = next(pr_num_gen) feat_branch_squash_commit_spec = { "cid": "mr4-pkg1-feat", **{ # type: ignore[typeddict-item] cmt_type: format_squash_commit_msg_github( # Use the primary commit message as the PR title pr_title=pkg1_feat_branch_commits[0][cmt_type], pr_number=feat_branch_pr_number, squashed_commits=[ cmt[commit_type] for cmt in pkg1_feat_branch_commits[1:] ], ) for cmt_type in all_commit_types }, "datetime": next(commit_timestamp_gen), "include_in_changelog": True, } pkg1_new_version = Version.parse( "1.1.0", tag_format=pkg1_new_version.tag_format ) repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, 
"details": {"branch": DEFAULT_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_SQUASH, "details": { "branch": mr4_pkg1_feat_branch_name, "strategy_option": "theirs", "commit_def": convert_commit_spec_to_commit_def( feat_branch_squash_commit_spec, commit_type, parser=cast( "CommitParser[ParseResult, ParserOptions]", pkg1_commit_parser, ), monorepo=True, ), "config_file": monorepo_pkg1_pyproject_toml_file, }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(pkg1_new_version), "datetime": next(commit_timestamp_gen), "tag_format": pkg1_new_version.tag_format, "version_py_file": monorepo_pkg1_version_py_file.relative_to( monorepo_pkg1_dir ), "commit_message_format": pkg1_cmt_msg_format, "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": pkg1_new_version, "dest_files": pkg1_changelog_file_definitions, "commit_ids": [ f'{feat_branch_squash_commit_spec["cid"]}-{index + 1}' for index in range( len(pkg1_feat_branch_commits) ) ], }, }, change_to_pkg1_dir, ], "post_actions": [change_to_example_project_dir], }, }, ] ) return repo_construction_steps return _get_repo_from_definition @pytest.fixture(scope="session") def build_monorepo_w_github_flow_w_default_release_channel( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_github_flow_monorepo_w_default_release_channel: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_github_flow_monorepo_w_default_release_channel: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = ( get_repo_definition_4_github_flow_monorepo_w_default_release_channel( commit_type=commit_type, ) ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( 
repo_name=repo_name, build_spec_hash=build_spec_hash_4_github_flow_monorepo_w_default_release_channel, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def monorepo_w_github_flow_w_default_release_channel_conventional_commits( build_monorepo_w_github_flow_w_default_release_channel: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = ( monorepo_w_github_flow_w_default_release_channel_conventional_commits.__name__ ) commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_monorepo_w_github_flow_w_default_release_channel( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } python-semantic-release-10.4.1/tests/fixtures/monorepos/github_flow/monorepo_w_release_channels.py000066400000000000000000001157071506116242600340310ustar00rootroot00000000000000from __future__ import annotations from datetime import timedelta from itertools import count from pathlib import Path from textwrap import dedent from typing import TYPE_CHECKING, cast import pytest from semantic_release.cli.config import ChangelogOutputFormat from semantic_release.commit_parser.conventional.options_monorepo import ( ConventionalCommitMonorepoParserOptions, ) from semantic_release.commit_parser.conventional.parser_monorepo import ( ConventionalCommitMonorepoParser, ) from semantic_release.version.version import Version 
import tests.conftest import tests.const import tests.util from tests.const import ( DEFAULT_BRANCH_NAME, EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Sequence from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.token import ParseResult from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, ConvertCommitSpecsToCommitDefsFn, ConvertCommitSpecToCommitDefFn, ExProjectGitRepoFn, FormatGitHubMergeCommitMsgFn, GetRepoDefinitionFn, RepoActionChangeDirectory, RepoActionGitMerge, RepoActionGitMergeDetails, RepoActions, RepoActionWriteChangelogsDestFile, TomlSerializableTypes, ) @pytest.fixture(scope="session") def deps_files_4_github_flow_monorepo_w_feature_release_channel( deps_files_4_example_git_monorepo: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_monorepo, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_github_flow_monorepo_w_feature_release_channel( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_github_flow_monorepo_w_feature_release_channel: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files( deps_files_4_github_flow_monorepo_w_feature_release_channel ) @pytest.fixture(scope="session") def get_repo_definition_4_github_flow_monorepo_w_feature_release_channel( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, convert_commit_spec_to_commit_def: ConvertCommitSpecToCommitDefFn, 
format_merge_commit_msg_github: FormatGitHubMergeCommitMsgFn, monorepo_pkg1_changelog_md_file: Path, monorepo_pkg1_changelog_rst_file: Path, monorepo_pkg2_changelog_md_file: Path, monorepo_pkg2_changelog_rst_file: Path, monorepo_pkg1_name: str, monorepo_pkg2_name: str, monorepo_pkg1_dir: Path, monorepo_pkg2_dir: Path, monorepo_pkg1_version_py_file: Path, monorepo_pkg2_version_py_file: Path, stable_now_date: GetStableDateNowFn, default_tag_format_str: str, ) -> GetRepoDefinitionFn: """ Builds a Monorepo with the GitHub Flow branching strategy and a merge commit merging strategy for alpha feature releases and official releases on the default branch. Implementation: - The monorepo contains two packages, each with its own internal changelog but shared template. - The repository implements the following git graph: ``` * chore(release): pkg2@1.1.0 [skip ci] (tag: pkg2-v1.1.0) * Merge pull request #3 from 'pkg2/feat/pr-2' |\ | * chore(release): pkg2@1.1.0-alpha.2 [skip ci] (tag: pkg2-v1.1.0-alpha.2, branch: pkg2/feat/pr-2) | * fix(pkg2-cli): file modified outside of pkg 2, identified by scope | * chore(release): pkg2@1.1.0-alpha.1 [skip ci] (tag: pkg2-v1.1.0-alpha.1) | * docs: add cli documentation | * test: add cli tests | * feat: no pkg scope but file in pkg 2 directory |/ * chore(release): pkg1@1.0.1 [skip ci] (tag: pkg1-v1.0.1) * Merge pull request #2 from 'pkg1/fix/pr-1' |\ | * chore(release): pkg1@1.0.1-alpha.2 [skip ci] (tag: pkg1-v1.0.1-alpha.2, branch: pkg1/fix/pr-1) | * fix(pkg1-cli): file modified outside of pkg 1, identified by scope | * chore(release): pkg1@1.0.1-alpha.1 [skip ci] (tag: pkg1-v1.0.1-alpha.1) | * fix: no pkg scope but file in pkg 1 directory |/ * chore(release): pkg2@1.0.0 [skip ci] (tag: pkg2-v1.0.0) # Initial release of pkg 2 * chore(release): pkg1@1.0.0 [skip ci] (tag: pkg1-v1.0.0) # Initial release of pkg 1 * Initial commit # Includes core functionality for both packages ``` """ def _get_repo_from_definition( commit_type: CommitConvention, 
hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = default_tag_format_str, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = True, ignore_merge_commits: bool = True, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) pr_num_gen = (i for i in count(start=2, step=1)) pkg1_changelog_file_definitions: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": monorepo_pkg1_changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, "mask_initial_release": True, }, { "path": monorepo_pkg1_changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, "mask_initial_release": True, }, ] pkg2_changelog_file_definitions: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": monorepo_pkg2_changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, "mask_initial_release": True, }, { "path": monorepo_pkg2_changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, "mask_initial_release": True, }, ] change_to_pkg1_dir: RepoActionChangeDirectory = { "action": RepoActionStep.CHANGE_DIRECTORY, "details": { "directory": monorepo_pkg1_dir, }, } change_to_pkg2_dir: RepoActionChangeDirectory = { "action": RepoActionStep.CHANGE_DIRECTORY, "details": { "directory": monorepo_pkg2_dir, }, } change_to_example_project_dir: RepoActionChangeDirectory = { "action": RepoActionStep.CHANGE_DIRECTORY, "details": { "directory": "/", }, } if commit_type != "conventional": raise ValueError(f"Unsupported commit type: {commit_type}") pkg1_commit_parser = ConventionalCommitMonorepoParser( options=ConventionalCommitMonorepoParserOptions( parse_squash_commits=True, ignore_merge_commits=ignore_merge_commits, scope_prefix=f"{monorepo_pkg1_name}-?", path_filters=(".",), ) ) pkg2_commit_parser = ConventionalCommitMonorepoParser( 
options=ConventionalCommitMonorepoParserOptions( parse_squash_commits=pkg1_commit_parser.options.parse_squash_commits, ignore_merge_commits=pkg1_commit_parser.options.ignore_merge_commits, scope_prefix=f"{monorepo_pkg2_name}-?", path_filters=(".",), ) ) common_configs: dict[str, TomlSerializableTypes] = { # Set the default release branch "tool.semantic_release.branches.main": { "match": r"^(main|master)$", "prerelease": False, }, "tool.semantic_release.allow_zero_version": False, "tool.semantic_release.changelog.exclude_commit_patterns": [r"^chore"], "tool.semantic_release.commit_parser": f"{commit_type}-monorepo", "tool.semantic_release.commit_parser_options.parse_squash_commits": pkg1_commit_parser.options.parse_squash_commits, "tool.semantic_release.commit_parser_options.ignore_merge_commits": pkg1_commit_parser.options.ignore_merge_commits, } mr1_pkg1_fix_branch_name = f"{monorepo_pkg1_name}/fix/pr-1" mr2_pkg2_feat_branch_name = f"{monorepo_pkg2_name}/feat/pr-2" pkg1_new_version = Version.parse( "1.0.0", tag_format=f"{monorepo_pkg1_name}-{tag_format_str}" ) pkg2_new_version = Version.parse( "1.0.0", tag_format=f"{monorepo_pkg2_name}-{tag_format_str}" ) repo_construction_steps: list[RepoActions] = [ { "action": RepoActionStep.CREATE_MONOREPO, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "post_actions": [ { "action": RepoActionStep.CONFIGURE_MONOREPO, "details": { "package_dir": monorepo_pkg1_dir, "package_name": monorepo_pkg1_name, "tag_format_str": pkg1_new_version.tag_format, "mask_initial_release": mask_initial_release, "extra_configs": { **common_configs, "tool.semantic_release.commit_message": ( pkg1_cmt_msg_format := dedent( f"""\ chore(release): {monorepo_pkg1_name}@{{version}} [skip ci] Automatically generated by python-semantic-release """ ) ), # package branches "feat/" & "fix/" has prerelease suffix of "alpha" "tool.semantic_release.branches.alpha-release": { "match": 
rf"^{monorepo_pkg1_name}/(feat|fix)/.+", "prerelease": True, "prerelease_token": "alpha", }, "tool.semantic_release.commit_parser_options.scope_prefix": pkg1_commit_parser.options.scope_prefix, "tool.semantic_release.commit_parser_options.path_filters": pkg1_commit_parser.options.path_filters, **(extra_configs or {}), }, }, }, { "action": RepoActionStep.CONFIGURE_MONOREPO, "details": { "package_dir": monorepo_pkg2_dir, "package_name": monorepo_pkg2_name, "tag_format_str": pkg2_new_version.tag_format, "mask_initial_release": mask_initial_release, "extra_configs": { **common_configs, "tool.semantic_release.commit_message": ( pkg2_cmt_msg_format := dedent( f"""\ chore(release): {monorepo_pkg2_name}@{{version}} [skip ci] Automatically generated by python-semantic-release """ ) ), # package branches "feat/" & "fix/" has prerelease suffix of "alpha" "tool.semantic_release.branches.alpha-release": { "match": rf"^{monorepo_pkg2_name}/(feat|fix)/.+", "prerelease": True, "prerelease_token": "alpha", }, "tool.semantic_release.commit_parser_options.scope_prefix": pkg2_commit_parser.options.scope_prefix, "tool.semantic_release.commit_parser_options.path_filters": pkg2_commit_parser.options.path_filters, **(extra_configs or {}), }, }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_c1_initial := "c1_initial_commit" ), "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, ], commit_type, # this parser does not matter since the commit is common parser=cast( "CommitParser[ParseResult, ParserOptions]", pkg1_commit_parser, ), monorepo=True, ), }, }, ], }, } ] repo_construction_steps.extend( [ { "action": RepoActionStep.RELEASE, "details": { "version": str(pkg1_new_version), "datetime": next(commit_timestamp_gen), "tag_format": pkg1_new_version.tag_format, 
"version_py_file": monorepo_pkg1_version_py_file.relative_to( monorepo_pkg1_dir ), "commit_message_format": pkg1_cmt_msg_format, "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": pkg1_new_version, "dest_files": pkg1_changelog_file_definitions, "commit_ids": [cid_c1_initial], }, }, change_to_pkg1_dir, ], "post_actions": [change_to_example_project_dir], }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(pkg2_new_version), "datetime": next(commit_timestamp_gen), "tag_format": pkg2_new_version.tag_format, "version_py_file": monorepo_pkg2_version_py_file.relative_to( monorepo_pkg2_dir ), "commit_message_format": pkg2_cmt_msg_format, "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": pkg2_new_version, "dest_files": pkg2_changelog_file_definitions, "commit_ids": [cid_c1_initial], }, }, change_to_pkg2_dir, ], "post_actions": [change_to_example_project_dir], }, }, ] ) # Make a fix in package 1 and release it as an alpha release pkg1_new_version = Version.parse( "1.0.1-alpha.1", tag_format=pkg1_new_version.tag_format ) repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": mr1_pkg1_fix_branch_name, "start_branch": DEFAULT_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "pre_actions": [change_to_pkg1_dir], "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_pkg1_fib1_c1_fix := "pkg1_fix_branch_1_c1_fix" ), "conventional": "fix: no pkg scope but file in pkg 1 directory\n\nResolves: #123\n", "emoji": ":bug: no pkg scope but file in pkg 1 directory\n\nResolves: #123\n", "scipy": "MAINT: no pkg scope but file in pkg 1 directory\n\nResolves: #123\n", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=cast( "CommitParser[ParseResult, ParserOptions]", pkg1_commit_parser, ), monorepo=True, ), "post_actions": [change_to_example_project_dir], }, 
}, { "action": RepoActionStep.RELEASE, "details": { "version": str(pkg1_new_version), "datetime": next(commit_timestamp_gen), "tag_format": pkg1_new_version.tag_format, "version_py_file": monorepo_pkg1_version_py_file.relative_to( monorepo_pkg1_dir ), "commit_message_format": pkg1_cmt_msg_format, "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": pkg1_new_version, "dest_files": pkg1_changelog_file_definitions, "commit_ids": [cid_pkg1_fib1_c1_fix], }, }, change_to_pkg1_dir, ], "post_actions": [change_to_example_project_dir], }, }, ] ) # Update the fix in package 1 and release another alpha release pkg1_new_version = Version.parse( "1.0.1-alpha.2", tag_format=pkg1_new_version.tag_format ) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_pkg1_fib1_c2_fix := "pkg1_fix_branch_1_c2_fix" ), "conventional": "fix(pkg1-cli): file modified outside of pkg 1, identified by scope\n\n", "emoji": ":bug: (pkg1-cli) file modified outside of pkg 1, identified by scope", "scipy": "MAINT:pkg1-cli: file modified outside of pkg 1, identified by scope", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=cast( "CommitParser[ParseResult, ParserOptions]", pkg1_commit_parser, ), monorepo=True, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(pkg1_new_version), "datetime": next(commit_timestamp_gen), "tag_format": pkg1_new_version.tag_format, "version_py_file": monorepo_pkg1_version_py_file.relative_to( monorepo_pkg1_dir ), "commit_message_format": pkg1_cmt_msg_format, "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": pkg1_new_version, "dest_files": pkg1_changelog_file_definitions, "commit_ids": [cid_pkg1_fib1_c2_fix], }, }, change_to_pkg1_dir, ], "post_actions": [change_to_example_project_dir], }, }, ] ) # Merge the fix branch into the default 
branch and formally release it pkg1_new_version = Version.parse( "1.0.1", tag_format=pkg1_new_version.tag_format ) merge_def_type_placeholder: RepoActionGitMerge[RepoActionGitMergeDetails] = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": mr1_pkg1_fix_branch_name, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_pkg1_fib1_merge := "pkg1_fix_branch_1_merge"), "conventional": ( merge_msg := format_merge_commit_msg_github( pr_number=next(pr_num_gen), branch_name=mr1_pkg1_fix_branch_name, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=cast( "CommitParser[ParseResult, ParserOptions]", pkg1_commit_parser, ), monorepo=True, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, merge_def_type_placeholder, { "action": RepoActionStep.RELEASE, "details": { "version": str(pkg1_new_version), "datetime": next(commit_timestamp_gen), "tag_format": pkg1_new_version.tag_format, "version_py_file": monorepo_pkg1_version_py_file.relative_to( monorepo_pkg1_dir ), "commit_message_format": pkg1_cmt_msg_format, "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": pkg1_new_version, "dest_files": pkg1_changelog_file_definitions, "commit_ids": [cid_pkg1_fib1_merge], }, }, change_to_pkg1_dir, ], "post_actions": [change_to_example_project_dir], }, }, ] ) # Make a feature branch and release it as an alpha release pkg2_new_version = Version.parse( "1.1.0-alpha.1", tag_format=pkg2_new_version.tag_format ) repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": mr2_pkg2_feat_branch_name, "start_branch": DEFAULT_BRANCH_NAME, }, }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "pre_actions": [change_to_pkg2_dir], "commits": 
convert_commit_specs_to_commit_defs( [ { "cid": ( cid_pkg2_feb1_c1_feat := "pkg2_feat_branch_1_c1_feat" ), "conventional": "feat: no pkg scope but file in pkg 2 directory\n", "emoji": ":sparkles: no pkg scope but file in pkg 2 directory", "scipy": "ENH: no pkg scope but file in pkg 2 directory", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, { "cid": ( cid_pkg2_feb1_c2_test := "pkg2_feat_branch_1_c2_test" ), "conventional": "test: add cli tests", "emoji": ":checkmark: add cli tests", "scipy": "TST: add cli tests", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, { "cid": ( cid_pkg2_feb1_c3_docs := "pkg2_feat_branch_1_c3_docs" ), "conventional": "docs: add cli documentation", "emoji": ":memo: add cli documentation", "scipy": "DOC: add cli documentation", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=cast( "CommitParser[ParseResult, ParserOptions]", pkg2_commit_parser, ), monorepo=True, ), "post_actions": [change_to_example_project_dir], }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(pkg2_new_version), "datetime": next(commit_timestamp_gen), "tag_format": pkg2_new_version.tag_format, "version_py_file": monorepo_pkg2_version_py_file.relative_to( monorepo_pkg2_dir ), "commit_message_format": pkg2_cmt_msg_format, "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": pkg2_new_version, "dest_files": pkg2_changelog_file_definitions, "commit_ids": [ cid_pkg2_feb1_c1_feat, cid_pkg2_feb1_c2_test, cid_pkg2_feb1_c3_docs, ], }, }, change_to_pkg2_dir, ], "post_actions": [change_to_example_project_dir], }, }, ] ) # Update the feat with a fix in package 2 and release another alpha release pkg2_new_version = Version.parse( "1.1.0-alpha.2", tag_format=pkg2_new_version.tag_format ) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( 
cid_pkg2_feb1_c4_fix := "pkg2_feat_branch_1_c4_fix" ), "conventional": "fix(pkg2-cli): file modified outside of pkg 2, identified by scope", "emoji": ":bug: (pkg2-cli) file modified outside of pkg 2, identified by scope", "scipy": "MAINT:pkg2-cli: file modified outside of pkg 2, identified by scope", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=cast( "CommitParser[ParseResult, ParserOptions]", pkg2_commit_parser, ), monorepo=True, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(pkg2_new_version), "datetime": next(commit_timestamp_gen), "tag_format": pkg2_new_version.tag_format, "version_py_file": monorepo_pkg2_version_py_file.relative_to( monorepo_pkg2_dir ), "commit_message_format": pkg2_cmt_msg_format, "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": pkg2_new_version, "dest_files": pkg2_changelog_file_definitions, "commit_ids": [cid_pkg2_feb1_c4_fix], }, }, change_to_pkg2_dir, ], "post_actions": [change_to_example_project_dir], }, }, ] ) # Merge the feat branch into the default branch and formally release a package 2 pkg2_new_version = Version.parse( "1.1.0", tag_format=pkg2_new_version.tag_format ) merge_def_type_placeholder = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": mr2_pkg2_feat_branch_name, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_pkg2_feb1_merge := "pkg2_feat_branch_1_merge"), "conventional": ( merge_msg := format_merge_commit_msg_github( pr_number=next(pr_num_gen), branch_name=mr2_pkg2_feat_branch_name, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=cast( "CommitParser[ParseResult, ParserOptions]", pkg2_commit_parser, ), monorepo=True, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, 
merge_def_type_placeholder, { "action": RepoActionStep.RELEASE, "details": { "version": str(pkg2_new_version), "datetime": next(commit_timestamp_gen), "tag_format": pkg2_new_version.tag_format, "version_py_file": monorepo_pkg2_version_py_file.relative_to( monorepo_pkg2_dir ), "commit_message_format": pkg2_cmt_msg_format, "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": pkg2_new_version, "dest_files": pkg2_changelog_file_definitions, "commit_ids": [cid_pkg2_feb1_merge], }, }, change_to_pkg2_dir, ], "post_actions": [change_to_example_project_dir], }, }, ] ) return repo_construction_steps return _get_repo_from_definition @pytest.fixture(scope="session") def build_monorepo_w_github_flow_w_feature_release_channel( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_github_flow_monorepo_w_feature_release_channel: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_github_flow_monorepo_w_feature_release_channel: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = ( get_repo_definition_4_github_flow_monorepo_w_feature_release_channel( commit_type=commit_type, ) ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_github_flow_monorepo_w_feature_release_channel, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the 
built directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def monorepo_w_github_flow_w_feature_release_channel_conventional_commits( build_monorepo_w_github_flow_w_feature_release_channel: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = ( monorepo_w_github_flow_w_feature_release_channel_conventional_commits.__name__ ) commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_monorepo_w_github_flow_w_feature_release_channel( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } python-semantic-release-10.4.1/tests/fixtures/monorepos/trunk_based_dev/000077500000000000000000000000001506116242600265335ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/fixtures/monorepos/trunk_based_dev/__init__.py000066400000000000000000000001071506116242600306420ustar00rootroot00000000000000from tests.fixtures.monorepos.trunk_based_dev.monorepo_w_tags import * python-semantic-release-10.4.1/tests/fixtures/monorepos/trunk_based_dev/monorepo_w_tags.py000066400000000000000000000662201506116242600323150ustar00rootroot00000000000000from __future__ import annotations from datetime import timedelta from itertools import count from pathlib import Path from textwrap import dedent from typing import TYPE_CHECKING, cast import pytest from semantic_release.cli.config import ChangelogOutputFormat from semantic_release.commit_parser.conventional.options_monorepo import ( ConventionalCommitMonorepoParserOptions, ) from semantic_release.commit_parser.conventional.parser_monorepo import ( ConventionalCommitMonorepoParser, ) from semantic_release.version.version import Version import tests.conftest import tests.const import tests.util from tests.const import ( EXAMPLE_HVCS_DOMAIN, 
INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Sequence from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.token import ParseResult from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, ConvertCommitSpecsToCommitDefsFn, ExProjectGitRepoFn, GetRepoDefinitionFn, RepoActionChangeDirectory, RepoActions, RepoActionWriteChangelogsDestFile, TomlSerializableTypes, ) @pytest.fixture(scope="session") def deps_files_4_trunk_only_monorepo_w_tags( deps_files_4_example_git_monorepo: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_monorepo, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_trunk_only_monorepo_w_tags( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_trunk_only_monorepo_w_tags: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files(deps_files_4_trunk_only_monorepo_w_tags) @pytest.fixture(scope="session") def get_repo_definition_4_trunk_only_monorepo_w_tags( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, monorepo_pkg1_changelog_md_file: Path, monorepo_pkg1_changelog_rst_file: Path, monorepo_pkg2_changelog_md_file: Path, monorepo_pkg2_changelog_rst_file: Path, monorepo_pkg1_name: str, monorepo_pkg2_name: str, monorepo_pkg1_dir: Path, monorepo_pkg2_dir: Path, monorepo_pkg1_version_py_file: Path, monorepo_pkg2_version_py_file: Path, stable_now_date: GetStableDateNowFn, default_tag_format_str: str, ) -> 
GetRepoDefinitionFn: """ Builds a Monorepo with trunk-based development only with official releases. Implementation: - The monorepo contains two packages, each with its own internal changelog but shared template. - The repository implements the following git graph: ``` * chore(release): pkg1@0.1.0 [skip ci] (tag: pkg1-v0.1.0, branch: main) * feat(pkg1): file modified outside of pkg 1, identified by scope * chore(release): pkg2@0.1.1 [skip ci] (tag: pkg2-v0.1.1) * fix(pkg2-cli): file modified outside of pkg 2, identified by scope * chore(release): pkg2@0.1.0 [skip ci] (tag: pkg2-v0.1.0) * docs(pkg2-cli): common docs modified outside of pkg 2, identified by scope * test: no pkg scope but add tests to package 2 directory * feat: no pkg scope but file in pkg 2 directory * chore(release): pkg1@0.0.1 [skip ci] (tag: pkg1-v0.0.1) * fix: no pkg scope but file in pkg 1 directory * Initial commit # Includes core functionality for both packages ``` """ def _get_repo_from_definition( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = default_tag_format_str, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = True, ignore_merge_commits: bool = True, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) pkg1_changelog_file_definitions: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": monorepo_pkg1_changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, "mask_initial_release": True, }, { "path": monorepo_pkg1_changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, "mask_initial_release": True, }, ] pkg2_changelog_file_definitions: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": monorepo_pkg2_changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, "mask_initial_release": True, }, 
{ "path": monorepo_pkg2_changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, "mask_initial_release": True, }, ] change_to_pkg1_dir: RepoActionChangeDirectory = { "action": RepoActionStep.CHANGE_DIRECTORY, "details": { "directory": monorepo_pkg1_dir, }, } change_to_pkg2_dir: RepoActionChangeDirectory = { "action": RepoActionStep.CHANGE_DIRECTORY, "details": { "directory": monorepo_pkg2_dir, }, } change_to_example_project_dir: RepoActionChangeDirectory = { "action": RepoActionStep.CHANGE_DIRECTORY, "details": { "directory": "/", }, } if commit_type != "conventional": raise ValueError(f"Unsupported commit type: {commit_type}") pkg1_commit_parser = ConventionalCommitMonorepoParser( options=ConventionalCommitMonorepoParserOptions( parse_squash_commits=True, ignore_merge_commits=ignore_merge_commits, scope_prefix=f"{monorepo_pkg1_name}-?", path_filters=(".",), ) ) pkg2_commit_parser = ConventionalCommitMonorepoParser( options=ConventionalCommitMonorepoParserOptions( parse_squash_commits=pkg1_commit_parser.options.parse_squash_commits, ignore_merge_commits=pkg1_commit_parser.options.ignore_merge_commits, scope_prefix=f"{monorepo_pkg2_name}-?", path_filters=(".",), ) ) common_configs: dict[str, TomlSerializableTypes] = { # Set the default release branch "tool.semantic_release.branches.main": { "match": r"^(main|master)$", "prerelease": False, }, "tool.semantic_release.allow_zero_version": True, "tool.semantic_release.changelog.exclude_commit_patterns": [r"^chore"], "tool.semantic_release.commit_parser": f"{commit_type}-monorepo", "tool.semantic_release.commit_parser_options.parse_squash_commits": pkg1_commit_parser.options.parse_squash_commits, "tool.semantic_release.commit_parser_options.ignore_merge_commits": pkg1_commit_parser.options.ignore_merge_commits, } pkg1_new_version = Version.parse( "0.0.1", tag_format=f"{monorepo_pkg1_name}-{tag_format_str}" ) pkg2_new_version = Version.parse( "0.1.0", tag_format=f"{monorepo_pkg2_name}-{tag_format_str}" ) 
repo_construction_steps: list[RepoActions] = [ { "action": RepoActionStep.CREATE_MONOREPO, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "post_actions": [ { "action": RepoActionStep.CONFIGURE_MONOREPO, "details": { "package_dir": monorepo_pkg1_dir, "package_name": monorepo_pkg1_name, "tag_format_str": pkg1_new_version.tag_format, "mask_initial_release": mask_initial_release, "extra_configs": { **common_configs, "tool.semantic_release.commit_message": ( pkg1_cmt_msg_format := dedent( f"""\ chore(release): {monorepo_pkg1_name}@{{version}} [skip ci] Automatically generated by python-semantic-release """ ) ), "tool.semantic_release.commit_parser_options.scope_prefix": pkg1_commit_parser.options.scope_prefix, "tool.semantic_release.commit_parser_options.path_filters": pkg1_commit_parser.options.path_filters, **(extra_configs or {}), }, }, }, { "action": RepoActionStep.CONFIGURE_MONOREPO, "details": { "package_dir": monorepo_pkg2_dir, "package_name": monorepo_pkg2_name, "tag_format_str": pkg2_new_version.tag_format, "mask_initial_release": mask_initial_release, "extra_configs": { **common_configs, "tool.semantic_release.commit_message": ( pkg2_cmt_msg_format := dedent( f"""\ chore(release): {monorepo_pkg2_name}@{{version}} [skip ci] Automatically generated by python-semantic-release """ ) ), "tool.semantic_release.commit_parser_options.scope_prefix": pkg2_commit_parser.options.scope_prefix, "tool.semantic_release.commit_parser_options.path_filters": pkg2_commit_parser.options.path_filters, **(extra_configs or {}), }, }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_c1_initial := "c1_initial_commit" ), "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, ], commit_type, # this parser does 
not matter since the commit is common parser=cast( "CommitParser[ParseResult, ParserOptions]", pkg1_commit_parser, ), monorepo=True, ), }, }, ], }, } ] # Make initial release for package 1 repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "pre_actions": [change_to_pkg1_dir], "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_c2_pkg1_fix := "c2_pkg1_fix"), "conventional": "fix: no pkg scope but file in pkg 1 directory\n\nResolves: #123\n", "emoji": ":bug: no pkg scope but file in pkg 1 directory\n\nResolves: #123\n", "scipy": "MAINT: no pkg scope but file in pkg 1 directory\n\nResolves: #123\n", "datetime": next(commit_timestamp_gen), }, ], commit_type, parser=cast( "CommitParser[ParseResult, ParserOptions]", pkg1_commit_parser, ), monorepo=True, ), "post_actions": [change_to_example_project_dir], }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(pkg1_new_version), "datetime": next(commit_timestamp_gen), "tag_format": pkg1_new_version.tag_format, "version_py_file": monorepo_pkg1_version_py_file.relative_to( monorepo_pkg1_dir ), "commit_message_format": pkg1_cmt_msg_format, "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": pkg1_new_version, "dest_files": pkg1_changelog_file_definitions, "commit_ids": [cid_c1_initial, cid_c2_pkg1_fix], }, }, change_to_pkg1_dir, ], "post_actions": [change_to_example_project_dir], }, }, ] ) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "pre_actions": [change_to_pkg2_dir], "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_c4_pkg2_feat := "c4_pkg2_feat"), "conventional": "feat: no pkg scope but file in pkg 2 directory", "emoji": ":sparkles: no pkg scope but file in pkg 2 directory", "scipy": "ENH: no pkg scope but file in pkg 2 directory", "datetime": next(commit_timestamp_gen), }, { "cid": (cid_c5_pkg2_test := "c5_pkg2_test"), "conventional": "test: no pkg scope but add 
tests to package 2 directory", "emoji": ":checkmark: no pkg scope but add tests to package 2 directory", "scipy": "TST: no pkg scope but add tests to package 2 directory", "datetime": next(commit_timestamp_gen), }, ], commit_type, parser=cast( "CommitParser[ParseResult, ParserOptions]", pkg2_commit_parser, ), monorepo=True, ), "post_actions": [change_to_example_project_dir], }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_c6_pkg2_docs := "c6_pkg2_docs"), "conventional": "docs(pkg2-cli): common docs modified outside of pkg 2, identified by scope", "emoji": ":book: (pkg2-cli) common docs modified outside of pkg 2, identified by scope", "scipy": "DOC:pkg2-cli: common docs modified outside of pkg 2, identified by scope", "datetime": next(commit_timestamp_gen), }, ], commit_type, parser=cast( "CommitParser[ParseResult, ParserOptions]", pkg2_commit_parser, ), monorepo=True, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(pkg2_new_version), "datetime": next(commit_timestamp_gen), "tag_format": pkg2_new_version.tag_format, "version_py_file": monorepo_pkg2_version_py_file.relative_to( monorepo_pkg2_dir ), "commit_message_format": pkg2_cmt_msg_format, "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": pkg2_new_version, "dest_files": pkg2_changelog_file_definitions, "commit_ids": [ cid_c1_initial, cid_c4_pkg2_feat, cid_c5_pkg2_test, cid_c6_pkg2_docs, ], }, }, change_to_pkg2_dir, ], "post_actions": [change_to_example_project_dir], }, }, ] ) pkg2_new_version = Version.parse( "0.1.1", tag_format=pkg2_new_version.tag_format ) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_c8_pkg2_fix := "c8_pkg2_fix"), "conventional": "fix(pkg2-cli): file modified outside of pkg 2, identified by scope", "emoji": ":bug: (pkg2-cli) file modified outside of 
pkg 2, identified by scope", "scipy": "MAINT:pkg2-cli: file modified outside of pkg 2, identified by scope", "datetime": next(commit_timestamp_gen), }, ], commit_type, parser=cast( "CommitParser[ParseResult, ParserOptions]", pkg2_commit_parser, ), monorepo=True, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(pkg2_new_version), "datetime": next(commit_timestamp_gen), "tag_format": pkg2_new_version.tag_format, "version_py_file": monorepo_pkg2_version_py_file.relative_to( monorepo_pkg2_dir ), "commit_message_format": pkg2_cmt_msg_format, "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": pkg2_new_version, "dest_files": pkg2_changelog_file_definitions, "commit_ids": [cid_c8_pkg2_fix], }, }, change_to_pkg2_dir, ], "post_actions": [change_to_example_project_dir], }, }, ] ) pkg1_new_version = Version.parse( "0.1.0", tag_format=pkg1_new_version.tag_format ) # Add a feature to package 1 and release repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_c10_pkg1_feat := "c10_pkg1_feat"), "conventional": "feat(pkg1): file modified outside of pkg 1, identified by scope", "emoji": ":sparkles: (pkg1) file modified outside of pkg 1, identified by scope", "scipy": "ENH:pkg1: file modified outside of pkg 1, identified by scope", "datetime": next(commit_timestamp_gen), }, ], commit_type, parser=cast( "CommitParser[ParseResult, ParserOptions]", pkg1_commit_parser, ), monorepo=True, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(pkg1_new_version), "datetime": next(commit_timestamp_gen), "tag_format": pkg1_new_version.tag_format, "version_py_file": monorepo_pkg1_version_py_file.relative_to( monorepo_pkg1_dir ), "commit_message_format": pkg1_cmt_msg_format, "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": pkg1_new_version, "dest_files": 
pkg1_changelog_file_definitions, "commit_ids": [cid_c10_pkg1_feat], }, }, change_to_pkg1_dir, ], "post_actions": [change_to_example_project_dir], }, }, ] ) return repo_construction_steps return _get_repo_from_definition @pytest.fixture(scope="session") def build_trunk_only_monorepo_w_tags( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_trunk_only_monorepo_w_tags: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_trunk_only_monorepo_w_tags: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = get_repo_definition_4_trunk_only_monorepo_w_tags( commit_type=commit_type, ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_trunk_only_monorepo_w_tags, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def monorepo_w_trunk_only_releases_conventional_commits( build_trunk_only_monorepo_w_tags: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = monorepo_w_trunk_only_releases_conventional_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { 
"definition": build_trunk_only_monorepo_w_tags( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } python-semantic-release-10.4.1/tests/fixtures/repos/000077500000000000000000000000001506116242600225035ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/fixtures/repos/__init__.py000066400000000000000000000003051506116242600246120ustar00rootroot00000000000000from tests.fixtures.repos.git_flow import * from tests.fixtures.repos.github_flow import * from tests.fixtures.repos.repo_initial_commit import * from tests.fixtures.repos.trunk_based_dev import * python-semantic-release-10.4.1/tests/fixtures/repos/git_flow/000077500000000000000000000000001506116242600243155ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/fixtures/repos/git_flow/__init__.py000066400000000000000000000004271506116242600264310ustar00rootroot00000000000000from tests.fixtures.repos.git_flow.repo_w_1_release_channel import * from tests.fixtures.repos.git_flow.repo_w_2_release_channels import * from tests.fixtures.repos.git_flow.repo_w_3_release_channels import * from tests.fixtures.repos.git_flow.repo_w_4_release_channels import * python-semantic-release-10.4.1/tests/fixtures/repos/git_flow/repo_w_1_release_channel.py000066400000000000000000001135441506116242600316020ustar00rootroot00000000000000from __future__ import annotations from datetime import timedelta from itertools import count from pathlib import Path from typing import TYPE_CHECKING, cast import pytest from semantic_release.cli.config import ChangelogOutputFormat from semantic_release.version.version import Version import tests.conftest import tests.const import tests.util from tests.const import ( DEFAULT_BRANCH_NAME, EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Any, Generator, Sequence from semantic_release.commit_parser._base import CommitParser, ParserOptions from 
semantic_release.commit_parser.conventional.parser import ( ConventionalCommitParser, ) from semantic_release.commit_parser.emoji import EmojiCommitParser from semantic_release.commit_parser.scipy import ScipyCommitParser from semantic_release.commit_parser.token import ParseResult from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, ConvertCommitSpecsToCommitDefsFn, ConvertCommitSpecToCommitDefFn, ExProjectGitRepoFn, FormatGitMergeCommitMsgFn, GetRepoDefinitionFn, RepoActionGitFFMergeDetails, RepoActionGitMerge, RepoActionGitMergeDetails, RepoActions, RepoActionWriteChangelogsDestFile, TomlSerializableTypes, ) DEV_BRANCH_NAME = "dev" FEAT_BRANCH_1_NAME = "feat/feature-1" FEAT_BRANCH_2_NAME = "feat/feature-2" FEAT_BRANCH_3_NAME = "feat/feature-3" FEAT_BRANCH_4_NAME = "feat/feature-4" FIX_BRANCH_1_NAME = "fix/patch-1" FIX_BRANCH_2_NAME = "fix/patch-2" @pytest.fixture(scope="session") def deps_files_4_git_flow_repo_w_1_release_channels( deps_files_4_example_git_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_git_flow_repo_w_1_release_channels( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_git_flow_repo_w_1_release_channels: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files(deps_files_4_git_flow_repo_w_1_release_channels) @pytest.fixture(scope="session") def get_repo_definition_4_git_flow_repo_w_1_release_channels( 
convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, convert_commit_spec_to_commit_def: ConvertCommitSpecToCommitDefFn, format_merge_commit_msg_git: FormatGitMergeCommitMsgFn, changelog_md_file: Path, changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, default_conventional_parser: ConventionalCommitParser, default_emoji_parser: EmojiCommitParser, default_scipy_parser: ScipyCommitParser, default_tag_format_str: str, ) -> GetRepoDefinitionFn: """ This fixture returns a function that when called will define the actions needed to build a git repo that uses the git flow branching strategy and git merge commits with a single release channel 1. official (production) releases (x.x.x) """ parser_classes: dict[CommitConvention, CommitParser[Any, Any]] = { "conventional": default_conventional_parser, "emoji": default_emoji_parser, "scipy": default_scipy_parser, } def _get_repo_from_definition( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = True, ignore_merge_commits: bool = True, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) commit_parser = cast( "CommitParser[ParseResult, ParserOptions]", parser_classes[commit_type], ) # Common static actions or components changelog_file_definitions: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, "mask_initial_release": mask_initial_release, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, "mask_initial_release": mask_initial_release, }, ] ff_default_branch_merge_def: RepoActionGitMerge[RepoActionGitFFMergeDetails] = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": 
DEFAULT_BRANCH_NAME, "fast_forward": True, }, } fast_forward_dev_branch_actions: Sequence[RepoActions] = [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, ff_default_branch_merge_def, ] merge_dev_into_main_gen: Generator[ RepoActionGitMerge[RepoActionGitMergeDetails], None, None ] = ( { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": DEV_BRANCH_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": f"merge-dev2main-{i}", "conventional": ( merge_msg := format_merge_commit_msg_git( branch_name=DEV_BRANCH_NAME, tgt_branch_name=DEFAULT_BRANCH_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } for i in count(start=1) ) # Define All the steps required to create the repository repo_construction_steps: list[RepoActions] = [] repo_construction_steps.append( { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "tag_format_str": tag_format_str or default_tag_format_str, "mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.main": { "match": r"^(main|master)$", "prerelease": False, }, "tool.semantic_release.allow_zero_version": True, "tool.semantic_release.major_on_zero": True, "tool.semantic_release.commit_parser_options.ignore_merge_commits": ignore_merge_commits, **(extra_configs or {}), }, }, } ) # Make initial release new_version = Version.parse( "0.1.0", tag_format=tag_format_str or default_tag_format_str ) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": [ # only one commit to start the main branch convert_commit_spec_to_commit_def( { "cid": (cid_c1_initial := "c1_initial_commit"), "conventional": INITIAL_COMMIT_MESSAGE, "emoji": 
INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, commit_type, parser=commit_parser, ), ], }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": DEV_BRANCH_NAME, "start_branch": DEFAULT_BRANCH_NAME, }, }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_1_NAME, "start_branch": DEV_BRANCH_NAME, }, }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_feb1_c1_feat := "feat_branch_1_c1_feat" ), "conventional": "feat: add new feature", "emoji": ":sparkles: add new feature", "scipy": "ENH: add new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, ] ) merge_def_type_placeholder: RepoActionGitMerge[RepoActionGitMergeDetails] = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_1_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_feb1_merge := "feat_branch_1_merge"), "conventional": ( merge_msg := format_merge_commit_msg_git( branch_name=FEAT_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, merge_def_type_placeholder, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **(merge_dev_into_main_1 := next(merge_dev_into_main_gen)), }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, 
"details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_c1_initial, cid_feb1_c1_feat, cid_feb1_merge, merge_dev_into_main_1["details"]["commit_def"][ "cid" ], ], }, }, ], }, }, ] ) # Add a feature and officially release it new_version = Version.parse("0.2.0", tag_format=new_version.tag_format) repo_construction_steps.extend( [ *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_2_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_feb2_c1_feat := "feat_branch_2_c1_feat" ), "conventional": "feat: add a new feature", "emoji": ":sparkles: add a new feature", "scipy": "ENH: add a new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, ] ) merge_def_type_placeholder = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_2_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_feb2_merge := "feat_branch_2_merge"), "conventional": ( merge_msg := format_merge_commit_msg_git( branch_name=FEAT_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, merge_def_type_placeholder, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **(merge_dev_into_main_2 := next(merge_dev_into_main_gen)), }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": 
RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_feb2_c1_feat, cid_feb2_merge, merge_dev_into_main_2["details"]["commit_def"][ "cid" ], ], }, }, ], }, }, ] ) # Add a breaking change feature and officially release it new_version = Version.parse("1.0.0", tag_format=new_version.tag_format) repo_construction_steps.extend( [ *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_3_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_feb3_c1_break_feat := "feat_branch_3_c1_breaking_feature" ), "conventional": str.join( "\n\n", [ "feat: add revolutionary feature", "BREAKING CHANGE: this is a breaking change", ], ), "emoji": str.join( "\n\n", [ ":boom: add revolutionary feature", "This change is a breaking change", ], ), "scipy": str.join( "\n\n", [ "API: add revolutionary feature", "This is a breaking change", ], ), "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, ] ) merge_def_type_placeholder = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_3_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_feb3_merge := "feat_branch_3_merge"), "conventional": ( merge_msg := format_merge_commit_msg_git( branch_name=FEAT_BRANCH_3_NAME, tgt_branch_name=DEV_BRANCH_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, merge_def_type_placeholder, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { 
**(merge_dev_into_main_3 := next(merge_dev_into_main_gen)), }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_feb3_c1_break_feat, cid_feb3_merge, merge_dev_into_main_3["details"]["commit_def"][ "cid" ], ], }, }, ], }, }, ] ) # Make a fix and officially release new_version = Version.parse("1.0.1", tag_format=new_version.tag_format) repo_construction_steps.extend( [ *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FIX_BRANCH_1_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_fib1_c1_fix := "fix_branch_1_c1_fix"), "conventional": "fix: correct a bug\n\nCloses: #123\n", "emoji": ":bug: correct a bug\n\nCloses: #123\n", "scipy": "BUG: correct a bug\n\nCloses: #123\n", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, ] ) merge_def_type_placeholder = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FIX_BRANCH_1_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_fib1_merge := "fix_branch_1_merge"), "conventional": ( merge_msg := format_merge_commit_msg_git( branch_name=FIX_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, merge_def_type_placeholder, { "action": RepoActionStep.GIT_CHECKOUT, "details": 
{"branch": DEFAULT_BRANCH_NAME}, }, { **(merge_dev_into_main_4 := next(merge_dev_into_main_gen)), }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_fib1_c1_fix, cid_fib1_merge, merge_dev_into_main_4["details"]["commit_def"][ "cid" ], ], }, }, ], }, }, ] ) # Make a fix and Add multiple feature changes before officially releasing new_version = Version.parse("1.1.0", tag_format=new_version.tag_format) repo_construction_steps.extend( [ *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FIX_BRANCH_2_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_fib2_c1_fix := "fix_branch_2_c1_fix"), "conventional": "fix: correct another bug", "emoji": ":bug: correct another bug", "scipy": "BUG: correct another bug", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, ] ) merge_def_type_placeholder = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FIX_BRANCH_2_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_fib2_merge := "fix_branch_2_merge"), "conventional": ( merge_msg := format_merge_commit_msg_git( branch_name=FIX_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, merge_def_type_placeholder, { "action": 
RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_4_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_feb4_c1_feat := "feat_branch_4_c1_feat" ), "conventional": "feat(cli): add new config cli command", "emoji": ":sparkles: (cli) add new config cli command", "scipy": "ENH: cli: add new config cli command", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, ] ) merge_def_type_placeholder = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_4_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_feb4_merge := "feat_branch_4_merge"), "conventional": ( merge_msg := format_merge_commit_msg_git( branch_name=FEAT_BRANCH_4_NAME, tgt_branch_name=DEV_BRANCH_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, merge_def_type_placeholder, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **(merge_dev_into_main_5 := next(merge_dev_into_main_gen)), }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_fib2_c1_fix, cid_fib2_merge, cid_feb4_c1_feat, cid_feb4_merge, merge_dev_into_main_5["details"]["commit_def"][ "cid" ], ], }, }, ], }, }, ] ) return repo_construction_steps return _get_repo_from_definition @pytest.fixture(scope="session") def 
build_git_flow_repo_w_1_release_channels( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_git_flow_repo_w_1_release_channels: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_git_flow_repo_w_1_release_channels: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = ( get_repo_definition_4_git_flow_repo_w_1_release_channels( commit_type=commit_type, ) ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_git_flow_repo_w_1_release_channels, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def repo_w_git_flow_conventional_commits( build_git_flow_repo_w_1_release_channels: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_git_flow_conventional_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_git_flow_repo_w_1_release_channels( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_git_flow_emoji_commits( 
build_git_flow_repo_w_1_release_channels: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_git_flow_emoji_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_git_flow_repo_w_1_release_channels( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_git_flow_scipy_commits( build_git_flow_repo_w_1_release_channels: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_git_flow_scipy_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_git_flow_repo_w_1_release_channels( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } python-semantic-release-10.4.1/tests/fixtures/repos/git_flow/repo_w_2_release_channels.py000066400000000000000000001216301506116242600317610ustar00rootroot00000000000000from __future__ import annotations from datetime import timedelta from itertools import count from pathlib import Path from typing import TYPE_CHECKING, cast import pytest from semantic_release.cli.config import ChangelogOutputFormat from semantic_release.version.version import Version import tests.conftest import tests.const import tests.util from tests.const import ( DEFAULT_BRANCH_NAME, EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Any, Generator, Sequence from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.conventional.parser import ( ConventionalCommitParser, ) from semantic_release.commit_parser.emoji import EmojiCommitParser 
from semantic_release.commit_parser.scipy import ScipyCommitParser from semantic_release.commit_parser.token import ParseResult from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, ConvertCommitSpecsToCommitDefsFn, ConvertCommitSpecToCommitDefFn, ExProjectGitRepoFn, FormatGitMergeCommitMsgFn, GetRepoDefinitionFn, RepoActionGitFFMergeDetails, RepoActionGitMerge, RepoActionGitMergeDetails, RepoActions, RepoActionWriteChangelogsDestFile, TomlSerializableTypes, ) DEV_BRANCH_NAME = "dev" FEAT_BRANCH_1_NAME = "feat/feature-1" FEAT_BRANCH_2_NAME = "feat/feature-2" FEAT_BRANCH_3_NAME = "feat/feature-3" FEAT_BRANCH_4_NAME = "feat/feature-4" FIX_BRANCH_1_NAME = "fix/patch-1" @pytest.fixture(scope="session") def deps_files_4_git_flow_repo_w_2_release_channels( deps_files_4_example_git_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_git_flow_repo_w_2_release_channels( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_git_flow_repo_w_2_release_channels: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files(deps_files_4_git_flow_repo_w_2_release_channels) @pytest.fixture(scope="session") def get_repo_definition_4_git_flow_repo_w_2_release_channels( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, convert_commit_spec_to_commit_def: ConvertCommitSpecToCommitDefFn, format_merge_commit_msg_git: FormatGitMergeCommitMsgFn, changelog_md_file: Path, 
changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, default_conventional_parser: ConventionalCommitParser, default_emoji_parser: EmojiCommitParser, default_scipy_parser: ScipyCommitParser, default_tag_format_str: str, ) -> GetRepoDefinitionFn: """ This fixture returns a function that when called will define the actions needed to build a git repo that uses the git flow branching strategy and git merge commits with 2 release channels 1. alpha feature releases (x.x.x-alpha.x) 2. official (production) releases (x.x.x) """ parser_classes: dict[CommitConvention, CommitParser[Any, Any]] = { "conventional": default_conventional_parser, "emoji": default_emoji_parser, "scipy": default_scipy_parser, } def _get_repo_from_definition( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = True, ignore_merge_commits: bool = True, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) commit_parser = cast( "CommitParser[ParseResult, ParserOptions]", parser_classes[commit_type], ) # Common static actions or components changelog_file_definitions: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, "mask_initial_release": mask_initial_release, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, "mask_initial_release": mask_initial_release, }, ] ff_default_branch_merge_def: RepoActionGitMerge[RepoActionGitFFMergeDetails] = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": DEFAULT_BRANCH_NAME, "fast_forward": True, }, } fast_forward_dev_branch_actions: Sequence[RepoActions] = [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": 
DEV_BRANCH_NAME}, }, ff_default_branch_merge_def, ] merge_dev_into_main_gen: Generator[ RepoActionGitMerge[RepoActionGitMergeDetails], None, None ] = ( { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": DEV_BRANCH_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": f"merge-dev2main-{i}", "conventional": ( merge_msg := format_merge_commit_msg_git( branch_name=DEV_BRANCH_NAME, tgt_branch_name=DEFAULT_BRANCH_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } for i in count(start=1) ) # Define All the steps required to create the repository repo_construction_steps: list[RepoActions] = [] repo_construction_steps.append( { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "tag_format_str": tag_format_str or default_tag_format_str, "mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.main": { "match": r"^(main|master)$", "prerelease": False, }, # branch "feature" has prerelease suffix of "alpha" "tool.semantic_release.branches.features": { "match": r"^feat/.+", "prerelease": True, "prerelease_token": "alpha", }, "tool.semantic_release.allow_zero_version": True, "tool.semantic_release.major_on_zero": True, "tool.semantic_release.commit_parser_options.ignore_merge_commits": ignore_merge_commits, **(extra_configs or {}), }, }, } ) # Make initial release new_version = Version.parse( "0.1.0", tag_format=tag_format_str or default_tag_format_str ) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": [ # only one commit to start the main branch convert_commit_spec_to_commit_def( { "cid": (cid_c1_initial := "c1_initial_commit"), "conventional": INITIAL_COMMIT_MESSAGE, "emoji": 
INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, commit_type, parser=commit_parser, ), ], }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": DEV_BRANCH_NAME, "start_branch": DEFAULT_BRANCH_NAME, }, }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_1_NAME, "start_branch": DEV_BRANCH_NAME, }, }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_feb1_c1_feat := "feat_branch_1_c1_feat" ), "conventional": "feat: add new feature", "emoji": ":sparkles: add new feature", "scipy": "ENH: add new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, ] ) merge_def_type_placeholder: RepoActionGitMerge[RepoActionGitMergeDetails] = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_1_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_feb1_merge := "feat_branch_1_merge"), "conventional": ( merge_msg := format_merge_commit_msg_git( branch_name=FEAT_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, merge_def_type_placeholder, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **(merge_dev_into_main_1 := next(merge_dev_into_main_gen)), }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, 
"details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_c1_initial, cid_feb1_c1_feat, cid_feb1_merge, merge_dev_into_main_1["details"]["commit_def"][ "cid" ], ], }, }, ], }, }, ] ) # Add a feature and release it as an alpha release new_version = Version.parse("0.2.0-alpha.1", tag_format=new_version.tag_format) repo_construction_steps.extend( [ *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_2_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_feb2_c1_feat := "feat_branch_2_c1_feat" ), "conventional": "feat: add a new feature", "emoji": ":sparkles: add a new feature", "scipy": "ENH: add a new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_feb2_c1_feat, ], }, }, ], }, }, ] ) # Add a feature and release it as an alpha release new_version = Version.parse("1.0.0-alpha.1", tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_feb2_c2_break_feat := "feat_branch_2_c2_breaking_feat" ), "conventional": str.join( "\n\n", [ "feat: add revolutionary feature", "BREAKING CHANGE: this is a breaking change", ], ), "emoji": str.join( "\n\n", [ ":boom: add revolutionary feature", "This change is a breaking change", ], ), "scipy": str.join( "\n\n", [ "API: add revolutionary feature", "This is a breaking change", ], ), 
"datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_feb2_c2_break_feat, ], }, }, ], }, }, ] ) # Add another feature and officially release new_version = Version.parse("1.0.0", tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_feb2_c3_feat := "feat_branch_2_c3_feat" ), "conventional": "feat: add some more text", "emoji": ":sparkles: add some more text", "scipy": "ENH: add some more text", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, ] ) merge_def_type_placeholder = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_2_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_feb2_merge := "feat_branch_2_merge"), "conventional": ( merge_msg := format_merge_commit_msg_git( branch_name=FEAT_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, merge_def_type_placeholder, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **(merge_dev_into_main_1 := next(merge_dev_into_main_gen)), }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": 
new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_feb2_c3_feat, cid_feb2_merge, merge_dev_into_main_1["details"]["commit_def"][ "cid" ], ], }, }, ], }, }, ] ) # Add another feature and officially release (no intermediate alpha release) new_version = Version.parse("1.1.0", tag_format=new_version.tag_format) repo_construction_steps.extend( [ *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_3_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_feb3_c1_feat := "feat_branch_3_c1_feat" ), "conventional": "feat(cli): add new config cli command", "emoji": ":sparkles: (cli) add new config cli command", "scipy": "ENH: cli: add new config cli command", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, ] ) merge_def_type_placeholder = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_3_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_feb3_merge := "feat_branch_3_merge"), "conventional": ( merge_msg := format_merge_commit_msg_git( branch_name=FEAT_BRANCH_3_NAME, tgt_branch_name=DEV_BRANCH_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, merge_def_type_placeholder, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **(merge_dev_into_main_2 := next(merge_dev_into_main_gen)), }, { 
"action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_feb3_c1_feat, cid_feb3_merge, merge_dev_into_main_2["details"]["commit_def"][ "cid" ], ], }, }, ], }, }, ] ) # Make a fix and officially release new_version = Version.parse("1.1.1", tag_format=new_version.tag_format) repo_construction_steps.extend( [ *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FIX_BRANCH_1_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_fib1_c1_fix := "fix_branch_1_c1_fix"), "conventional": "fix(config): fixed configuration generation\n\nCloses: #123", "emoji": ":bug: (config) fixed configuration generation\n\nCloses: #123", "scipy": "MAINT:config: fixed configuration generation\n\nCloses: #123", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, ] ) merge_def_type_placeholder = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FIX_BRANCH_1_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_fib1_merge := "fix_branch_1_merge"), "conventional": ( merge_msg := format_merge_commit_msg_git( branch_name=FIX_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, merge_def_type_placeholder, { "action": RepoActionStep.GIT_CHECKOUT, "details": 
{"branch": DEFAULT_BRANCH_NAME}, }, { **(merge_dev_into_main_3 := next(merge_dev_into_main_gen)), }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_fib1_c1_fix, cid_fib1_merge, merge_dev_into_main_3["details"]["commit_def"][ "cid" ], ], }, }, ], }, }, ] ) # Introduce a new feature and create a prerelease for it new_version = Version.parse("1.2.0-alpha.1", tag_format=new_version.tag_format) repo_construction_steps.extend( [ *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_4_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_feb4_c1_feat := "feat_branch_4_c1_feat" ), "conventional": "feat: add some more text", "emoji": ":sparkles: add some more text", "scipy": "ENH: add some more text", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_feb4_c1_feat, ], }, }, ], }, }, ] ) # Fix the previous alpha & add additional feature and create a subsequent prerelease for it new_version = Version.parse("1.2.0-alpha.2", tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_feb4_c2_fix := 
"feat_branch_4_c2_fix"), "conventional": "fix(scope): correct some text", "emoji": ":bug: (scope) correct some text", "scipy": "MAINT:scope: correct some text", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, { "cid": ( cid_feb4_c3_feat := "feat_branch_4_c3_feat" ), "conventional": "feat(scope): add some more text", "emoji": ":sparkles:(scope) add some more text", "scipy": "ENH: scope: add some more text", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_feb4_c2_fix, cid_feb4_c3_feat, ], }, }, ], }, }, ] ) return repo_construction_steps return _get_repo_from_definition @pytest.fixture(scope="session") def build_git_flow_repo_w_2_release_channels( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_git_flow_repo_w_2_release_channels: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_git_flow_repo_w_2_release_channels: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = ( get_repo_definition_4_git_flow_repo_w_2_release_channels( commit_type=commit_type, ) ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_git_flow_repo_w_2_release_channels, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := 
get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def repo_w_git_flow_w_alpha_prereleases_n_conventional_commits( build_git_flow_repo_w_2_release_channels: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_git_flow_repo_w_2_release_channels( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_git_flow_w_alpha_prereleases_n_emoji_commits( build_git_flow_repo_w_2_release_channels: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_git_flow_w_alpha_prereleases_n_emoji_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_git_flow_repo_w_2_release_channels( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_git_flow_w_alpha_prereleases_n_scipy_commits( build_git_flow_repo_w_2_release_channels: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_git_flow_w_alpha_prereleases_n_scipy_commits.__name__ 
commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_git_flow_repo_w_2_release_channels( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } python-semantic-release-10.4.1/tests/fixtures/repos/git_flow/repo_w_3_release_channels.py000066400000000000000000001277121506116242600317710ustar00rootroot00000000000000from __future__ import annotations from datetime import timedelta from itertools import count from pathlib import Path from typing import TYPE_CHECKING, cast import pytest from semantic_release.cli.config import ChangelogOutputFormat from semantic_release.version.version import Version import tests.conftest import tests.const import tests.util from tests.const import ( DEFAULT_BRANCH_NAME, EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Any, Generator, Sequence from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.conventional.parser import ( ConventionalCommitParser, ) from semantic_release.commit_parser.emoji import EmojiCommitParser from semantic_release.commit_parser.scipy import ScipyCommitParser from semantic_release.commit_parser.token import ParseResult from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, ConvertCommitSpecsToCommitDefsFn, ConvertCommitSpecToCommitDefFn, ExProjectGitRepoFn, FormatGitMergeCommitMsgFn, GetRepoDefinitionFn, RepoActionGitFFMergeDetails, RepoActionGitMerge, RepoActionGitMergeDetails, RepoActions, RepoActionWriteChangelogsDestFile, TomlSerializableTypes, ) DEV_BRANCH_NAME = "dev" FEAT_BRANCH_1_NAME = "feat/feature-1" FEAT_BRANCH_2_NAME = "feat/feature-2" 
FEAT_BRANCH_3_NAME = "feat/feature-3" FEAT_BRANCH_4_NAME = "feat/feature-4" FIX_BRANCH_1_NAME = "fix/patch-1" FIX_BRANCH_2_NAME = "fix/patch-2" @pytest.fixture(scope="session") def deps_files_4_git_flow_repo_w_3_release_channels( deps_files_4_example_git_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_git_flow_repo_w_3_release_channels( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_git_flow_repo_w_3_release_channels: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files(deps_files_4_git_flow_repo_w_3_release_channels) @pytest.fixture(scope="session") def get_repo_definition_4_git_flow_repo_w_3_release_channels( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, convert_commit_spec_to_commit_def: ConvertCommitSpecToCommitDefFn, format_merge_commit_msg_git: FormatGitMergeCommitMsgFn, changelog_md_file: Path, changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, default_conventional_parser: ConventionalCommitParser, default_emoji_parser: EmojiCommitParser, default_scipy_parser: ScipyCommitParser, default_tag_format_str: str, ) -> GetRepoDefinitionFn: """ This fixture returns a function that when called will define the actions needed to build a git repo that uses the git flow branching strategy and git merge commits with 2 release channels 1. alpha feature releases (x.x.x-alpha.x) 2. release candidate releases (x.x.x-rc.x) 3. 
official (production) releases (x.x.x) """ parser_classes: dict[CommitConvention, CommitParser[Any, Any]] = { "conventional": default_conventional_parser, "emoji": default_emoji_parser, "scipy": default_scipy_parser, } def _get_repo_from_definition( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = True, ignore_merge_commits: bool = True, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) commit_parser = cast( "CommitParser[ParseResult, ParserOptions]", parser_classes[commit_type], ) # Common static actions or components changelog_file_definitions: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, "mask_initial_release": mask_initial_release, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, "mask_initial_release": mask_initial_release, }, ] ff_default_branch_merge_def: RepoActionGitMerge[RepoActionGitFFMergeDetails] = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": DEFAULT_BRANCH_NAME, "fast_forward": True, }, } fast_forward_dev_branch_actions: Sequence[RepoActions] = [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, ff_default_branch_merge_def, ] merge_dev_into_main_gen: Generator[ RepoActionGitMerge[RepoActionGitMergeDetails], None, None ] = ( { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": DEV_BRANCH_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": f"merge-dev2main-{i}", "conventional": ( merge_msg := format_merge_commit_msg_git( branch_name=DEV_BRANCH_NAME, tgt_branch_name=DEFAULT_BRANCH_NAME, ) ), "emoji": merge_msg, "scipy": 
merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } for i in count(start=1) ) # Define All the steps required to create the repository repo_construction_steps: list[RepoActions] = [] repo_construction_steps.append( { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "tag_format_str": tag_format_str or default_tag_format_str, "mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.main": { "match": r"^(main|master)$", "prerelease": False, }, # branch "dev" has prerelease suffix of "rc" "tool.semantic_release.branches.dev": { "match": r"^dev$", "prerelease": True, "prerelease_token": "rc", }, # branch "feature" has prerelease suffix of "alpha" "tool.semantic_release.branches.features": { "match": r"^feat/.+", "prerelease": True, "prerelease_token": "alpha", }, "tool.semantic_release.allow_zero_version": True, "tool.semantic_release.major_on_zero": True, "tool.semantic_release.commit_parser_options.ignore_merge_commits": ignore_merge_commits, **(extra_configs or {}), }, }, } ) # Make initial release new_version = Version.parse( "0.1.0", tag_format=tag_format_str or default_tag_format_str ) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": [ # only one commit to start the main branch convert_commit_spec_to_commit_def( { "cid": (cid_c1_initial := "c1_initial_commit"), "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, commit_type, parser=commit_parser, ), ], }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": DEV_BRANCH_NAME, "start_branch": DEFAULT_BRANCH_NAME, }, }, }, { 
"action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_1_NAME, "start_branch": DEV_BRANCH_NAME, }, }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_feb1_c1_feat := "feat_branch_1_c1_feat" ), "conventional": "feat: add new feature", "emoji": ":sparkles: add new feature", "scipy": "ENH: add new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, ] ) merge_def_type_placeholder: RepoActionGitMerge[RepoActionGitMergeDetails] = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_1_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_feb1_merge := "feat_branch_1_merge"), "conventional": ( merge_msg := format_merge_commit_msg_git( branch_name=FEAT_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, merge_def_type_placeholder, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **(merge_dev_into_main_1 := next(merge_dev_into_main_gen)), }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_c1_initial, cid_feb1_c1_feat, cid_feb1_merge, merge_dev_into_main_1["details"]["commit_def"][ "cid" ], ], }, }, ], }, }, ] ) # Add a feature and release it as an alpha release new_version = Version.parse("0.2.0-alpha.1", 
tag_format=new_version.tag_format) repo_construction_steps.extend( [ *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_2_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_feb2_c1_feat := "feat_branch_2_c1_feat" ), "conventional": "feat: add a new feature", "emoji": ":sparkles: add a new feature", "scipy": "ENH: add a new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_feb2_c1_feat, ], }, }, ], }, }, ] ) # Make a breaking feature change and release it as an alpha release new_version = Version.parse("1.0.0-alpha.1", tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_feb2_c2_break_feat := "feat_branch_2_c2_break_feat" ), "conventional": str.join( "\n\n", [ "feat: add revolutionary feature", "BREAKING CHANGE: this is a breaking change", ], ), "emoji": str.join( "\n\n", [ ":boom: add revolutionary feature", "This change is a breaking change", ], ), "scipy": str.join( "\n\n", [ "API: add revolutionary feature", "This is a breaking change", ], ), "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": 
RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_feb2_c2_break_feat, ], }, }, ], }, }, ] ) # Merge in the successful alpha release and create a release candidate new_version = Version.parse("1.0.0-rc.1", tag_format=new_version.tag_format) merge_def_type_placeholder = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_2_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_feb2_merge := "feat_branch_2_merge"), "conventional": ( merge_msg := format_merge_commit_msg_git( branch_name=FEAT_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, merge_def_type_placeholder, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_feb2_merge, ], }, }, ], }, }, ] ) # officially release the sucessful release candidate to production new_version = Version.parse("1.0.0", tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **(merge_dev_into_main_2 := next(merge_dev_into_main_gen)), }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, 
"commit_ids": [ merge_dev_into_main_2["details"]["commit_def"][ "cid" ], ], }, }, ], }, }, ] ) # Add a feature and release it as an alpha release new_version = Version.parse("1.1.0-alpha.1", tag_format=new_version.tag_format) repo_construction_steps.extend( [ *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_3_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_feb3_c1_feat := "feat_branch_3_c1_feat" ), "conventional": "feat(cli): add new config cli command", "emoji": ":sparkles: (cli) add new config cli command", "scipy": "ENH:cli: add new config cli command", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_feb3_c1_feat, ], }, }, ], }, }, ] ) # Add another feature and release it as subsequent alpha release new_version = Version.parse("1.1.0-alpha.2", tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_feb3_c2_feat := "feat_branch_3_c2_feat" ), "conventional": "feat(config): add new config option", "emoji": ":sparkles: (config) add new config option", "scipy": "ENH: config: add new config option", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": 
next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_feb3_c2_feat, ], }, }, ], }, }, ] ) # Merge in the successful alpha release, add a fix, and create a release candidate new_version = Version.parse("1.1.0-rc.1", tag_format=new_version.tag_format) merge_def_type_placeholder = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_3_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_feb3_merge2dev := "feat_branch_3_merge2dev"), "conventional": ( merge_msg := format_merge_commit_msg_git( branch_name=FEAT_BRANCH_3_NAME, tgt_branch_name=DEV_BRANCH_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, merge_def_type_placeholder, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FIX_BRANCH_1_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_fib1_c1_fix := "fix_branch_1_c1_fix"), "conventional": "fix(cli): fix config cli command", "emoji": ":bug: (cli) fix config cli command", "scipy": "BUG:cli: fix config cli command", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, ] ) merge_def_type_placeholder = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FIX_BRANCH_1_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_fib1_merge2dev := "fix_branch_1_merge2dev"), "conventional": ( merge_msg := format_merge_commit_msg_git( branch_name=FIX_BRANCH_1_NAME, 
tgt_branch_name=DEV_BRANCH_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, merge_def_type_placeholder, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_feb3_merge2dev, cid_fib1_c1_fix, cid_fib1_merge2dev, ], }, }, ], }, }, ] ) # fix another bug from the release candidate and create a new release candidate new_version = Version.parse("1.1.0-rc.2", tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FIX_BRANCH_2_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_fib2_c1_fix := "fix_branch_2_c1"), "conventional": "fix(config): fix config option\n\nImplements: #123\n", "emoji": ":bug: (config) fix config option\n\nImplements: #123\n", "scipy": "BUG: config: fix config option\n\nImplements: #123\n", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, ] ) merge_def_type_placeholder = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FIX_BRANCH_2_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_fib2_merge2dev := "fix_branch_2_merge2dev"), "conventional": ( merge_msg := format_merge_commit_msg_git( branch_name=FIX_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": 
next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, merge_def_type_placeholder, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_fib2_c1_fix, cid_fib2_merge2dev, ], }, }, ], }, }, ] ) # officially release the sucessful release candidate to production new_version = Version.parse("1.1.0", tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **(merge_dev_into_main_3 := next(merge_dev_into_main_gen)), }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ merge_dev_into_main_3["details"]["commit_def"][ "cid" ], ], }, }, ], }, }, ] ) return repo_construction_steps return _get_repo_from_definition @pytest.fixture(scope="session") def build_git_flow_repo_w_3_release_channels( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_git_flow_repo_w_3_release_channels: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_git_flow_repo_w_3_release_channels: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> 
Sequence[RepoActions]: repo_construction_steps = ( get_repo_definition_4_git_flow_repo_w_3_release_channels( commit_type=commit_type, ) ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_git_flow_repo_w_3_release_channels, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_git_flow_repo_w_3_release_channels: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_git_flow_repo_w_3_release_channels: str, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = ( get_repo_definition_4_git_flow_repo_w_3_release_channels( commit_type="conventional", tag_format_str="submod-v{version}", ) ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) repo_name = repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format.__name__ build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_git_flow_repo_w_3_release_channels, build_repo_func=_build_repo, dest_dir=example_project_dir, ) if not (cached_repo_data := 
get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return { "definition": cached_repo_data["build_definition"], "repo": example_project_git_repo(), } @pytest.fixture def repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits( build_git_flow_repo_w_3_release_channels: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_git_flow_repo_w_3_release_channels( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_git_flow_w_rc_n_alpha_prereleases_n_emoji_commits( build_git_flow_repo_w_3_release_channels: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_git_flow_w_rc_n_alpha_prereleases_n_emoji_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_git_flow_repo_w_3_release_channels( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_git_flow_w_rc_n_alpha_prereleases_n_scipy_commits( build_git_flow_repo_w_3_release_channels: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_git_flow_w_rc_n_alpha_prereleases_n_scipy_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_git_flow_repo_w_3_release_channels( repo_name=repo_name, commit_type=commit_type, 
dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } python-semantic-release-10.4.1/tests/fixtures/repos/git_flow/repo_w_4_release_channels.py000066400000000000000000001202301506116242600317560ustar00rootroot00000000000000from __future__ import annotations from datetime import timedelta from itertools import count from pathlib import Path from typing import TYPE_CHECKING, cast import pytest from semantic_release.cli.config import ChangelogOutputFormat from semantic_release.version.version import Version import tests.conftest import tests.const import tests.util from tests.const import ( DEFAULT_BRANCH_NAME, EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Any, Generator, Sequence from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.conventional.parser import ( ConventionalCommitParser, ) from semantic_release.commit_parser.emoji import EmojiCommitParser from semantic_release.commit_parser.scipy import ScipyCommitParser from semantic_release.commit_parser.token import ParseResult from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, ConvertCommitSpecsToCommitDefsFn, ConvertCommitSpecToCommitDefFn, ExProjectGitRepoFn, FormatGitMergeCommitMsgFn, GetRepoDefinitionFn, RepoActionGitFFMergeDetails, RepoActionGitMerge, RepoActionGitMergeDetails, RepoActions, RepoActionWriteChangelogsDestFile, TomlSerializableTypes, ) BETA_BRANCH_NAME = "beta" DEV_BRANCH_NAME = "dev" FEAT_BRANCH_1_NAME = "feat/feature-1" FEAT_BRANCH_2_NAME = "feat/feature-2" FEAT_BRANCH_3_NAME = "feat/feature-3" FEAT_BRANCH_4_NAME = "feat/feature-4" FIX_BRANCH_1_NAME = "fix/patch-1" FIX_BRANCH_2_NAME = "fix/patch-2" 
@pytest.fixture(scope="session") def deps_files_4_git_flow_repo_w_4_release_channels( deps_files_4_example_git_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_git_flow_repo_w_4_release_channels( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_git_flow_repo_w_4_release_channels: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files(deps_files_4_git_flow_repo_w_4_release_channels) @pytest.fixture(scope="session") def get_repo_definition_4_git_flow_repo_w_4_release_channels( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, convert_commit_spec_to_commit_def: ConvertCommitSpecToCommitDefFn, format_merge_commit_msg_git: FormatGitMergeCommitMsgFn, changelog_md_file: Path, changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, default_conventional_parser: ConventionalCommitParser, default_emoji_parser: EmojiCommitParser, default_scipy_parser: ScipyCommitParser, default_tag_format_str: str, ) -> GetRepoDefinitionFn: """ This fixture returns a function that when called will define the actions needed to build a git repo that uses the git flow branching strategy and git merge commits with 4 release channels. This very complex repository mirrors the git flow example provided by a user in issue [#789](https://github.com/python-semantic-release/python-semantic-release/issues/789). 1. [feature branches] revision releases which include build-metadata of the branch name (slightly differs from user where the release also used alpha+build-metadata) 2. [dev branch] alpha feature releases (x.x.x-alpha.x) 3. [beta branch] beta releases (x.x.x-beta.x) 4. 
[main branch] official (production) releases (x.x.x) """ parser_classes: dict[CommitConvention, CommitParser[Any, Any]] = { "conventional": default_conventional_parser, "emoji": default_emoji_parser, "scipy": default_scipy_parser, } def _get_repo_from_definition( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = True, ignore_merge_commits: bool = True, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) commit_parser = cast( "CommitParser[ParseResult, ParserOptions]", parser_classes[commit_type], ) # Common static actions or components changelog_file_definitions: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, "mask_initial_release": mask_initial_release, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, "mask_initial_release": mask_initial_release, }, ] ff_beta_branch_merge_def: RepoActionGitMerge[RepoActionGitFFMergeDetails] = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": BETA_BRANCH_NAME, "fast_forward": True, }, } fast_forward_dev_branch_actions: Sequence[RepoActions] = [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, ff_beta_branch_merge_def, ] ff_main_branch_merge_def: RepoActionGitMerge[RepoActionGitFFMergeDetails] = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": DEFAULT_BRANCH_NAME, "fast_forward": True, }, } fast_forward_beta_branch_actions: Sequence[RepoActions] = [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": BETA_BRANCH_NAME}, }, ff_main_branch_merge_def, ] merge_dev_into_beta_gen: Generator[ RepoActionGitMerge[RepoActionGitMergeDetails], 
None, None ] = ( { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": DEV_BRANCH_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": f"merge-dev2beta-{i}", "conventional": ( merge_msg := format_merge_commit_msg_git( branch_name=DEV_BRANCH_NAME, tgt_branch_name=BETA_BRANCH_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } for i in count(start=1) ) merge_beta_into_main_gen: Generator[ RepoActionGitMerge[RepoActionGitMergeDetails], None, None ] = ( { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": BETA_BRANCH_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": f"merge-beta2main-{i}", "conventional": ( merge_msg := format_merge_commit_msg_git( branch_name=BETA_BRANCH_NAME, tgt_branch_name=DEFAULT_BRANCH_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } for i in count(start=1) ) # Define All the steps required to create the repository repo_construction_steps: list[RepoActions] = [] repo_construction_steps.append( { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "tag_format_str": tag_format_str or default_tag_format_str, "mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.main": { "match": rf"^{DEFAULT_BRANCH_NAME}$", "prerelease": False, }, # branch "beta" has prerelease suffix of "beta" "tool.semantic_release.branches.beta": { "match": rf"^{BETA_BRANCH_NAME}$", "prerelease": True, "prerelease_token": "beta", }, # branch "development" has prerelease suffix of "alpha" "tool.semantic_release.branches.dev": { "match": 
rf"^{DEV_BRANCH_NAME}$", "prerelease": True, "prerelease_token": "alpha", }, # branch "feat/*" has prerelease suffix of "rev" "tool.semantic_release.branches.features": { "match": r"^feat/.+", "prerelease": True, "prerelease_token": "rev", }, "tool.semantic_release.allow_zero_version": False, "tool.semantic_release.commit_parser_options.ignore_merge_commits": ignore_merge_commits, **(extra_configs or {}), }, }, } ) # Make initial release new_version = Version.parse( "1.0.0", tag_format=(tag_format_str or default_tag_format_str) ) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": [ # only one commit to start the main branch convert_commit_spec_to_commit_def( { "cid": ( cid_db_c1_initial := "db_c1_initial_commit" ), "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, commit_type, parser=commit_parser, ), ], }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": BETA_BRANCH_NAME, "start_branch": DEFAULT_BRANCH_NAME, }, }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": DEV_BRANCH_NAME, "start_branch": BETA_BRANCH_NAME, }, }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_1_NAME, "start_branch": DEV_BRANCH_NAME, }, }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_feb1_c1_feat := "feat_branch1_c1_feat"), "conventional": "feat: add new feature", "emoji": ":sparkles: add new feature", "scipy": "ENH: add new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, ] ) merge_def_type_placeholder: RepoActionGitMerge[RepoActionGitMergeDetails] = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": 
FEAT_BRANCH_1_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_feb1_merge2dev := "feat_branch1_merge2dev"), "conventional": ( merge_msg := format_merge_commit_msg_git( branch_name=FEAT_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, merge_def_type_placeholder, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": BETA_BRANCH_NAME}, }, { **(merge_dev_into_beta_1 := next(merge_dev_into_beta_gen)), }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **(merge_beta_into_main_1 := next(merge_beta_into_main_gen)), }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_db_c1_initial, cid_feb1_c1_feat, cid_feb1_merge2dev, merge_dev_into_beta_1["details"]["commit_def"][ "cid" ], merge_beta_into_main_1["details"]["commit_def"][ "cid" ], ], }, }, ], }, }, ] ) # Make a fix and release it as an alpha release new_version = Version.parse("1.0.1-alpha.1", tag_format=new_version.tag_format) repo_construction_steps.extend( [ *fast_forward_beta_branch_actions, *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FIX_BRANCH_1_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_fib1_c1_fix := "fix_branch_1_c1_fix"), "conventional": "fix(cli): fix config cli 
command\n\nCloses: #123\n", "emoji": ":bug: (cli) fix config cli command\n\nCloses: #123\n", "scipy": "BUG:cli: fix config cli command\n\nCloses: #123\n", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, ] ) merge_def_type_placeholder = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FIX_BRANCH_1_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_fib1_merge2dev := "fix_branch_1_merge_2_dev"), "conventional": ( merge_msg := format_merge_commit_msg_git( branch_name=FIX_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, merge_def_type_placeholder, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_fib1_c1_fix, cid_fib1_merge2dev, ], }, }, ], }, }, ] ) # Merge in the successful alpha release and create a beta release new_version = Version.parse("1.0.1-beta.1", tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": BETA_BRANCH_NAME}, }, { **(merge_dev_into_beta_2 := next(merge_dev_into_beta_gen)), }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ 
merge_dev_into_beta_2["details"]["commit_def"][ "cid" ], ], }, }, ], }, }, ] ) # Fix a bug found in beta release and create a new alpha release new_version = Version.parse("1.0.1-alpha.2", tag_format=new_version.tag_format) repo_construction_steps.extend( [ *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FIX_BRANCH_2_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_fib2_c1_fix := "fix_branch_2_c1_fix"), "conventional": "fix(config): fix config option", "emoji": ":bug: (config) fix config option", "scipy": "BUG: config: fix config option", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, ] ) merge_def_type_placeholder = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FIX_BRANCH_2_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_fib2_merge2dev := "fix_branch_2_merge_2_dev"), "conventional": ( merge_msg := format_merge_commit_msg_git( branch_name=FIX_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, merge_def_type_placeholder, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_fib2_c1_fix, cid_fib2_merge2dev, ], }, }, ], }, }, ] ) # Merge in the 2nd successful alpha release and create a secondary beta 
release new_version = Version.parse("1.0.1-beta.2", tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": BETA_BRANCH_NAME}, }, { **(merge_dev_into_beta_3 := next(merge_dev_into_beta_gen)), }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ merge_dev_into_beta_3["details"]["commit_def"][ "cid" ], ], }, }, ], }, }, ] ) # Add a new feature (another developer was working on) and create a release for it # Based on Semver standard, Build metadata is restricted to [A-Za-z0-9-] so we replace the '/' with a '-' new_version = Version.parse( f"""1.1.0-rev.1+{FEAT_BRANCH_2_NAME.replace("/", '-')}""", tag_format=new_version.tag_format, ) repo_construction_steps.extend( [ *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_2_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_feb2_c1_feat := "feat_branch_2_c1_feat" ), "conventional": "feat(feat-2): add another primary feature", "emoji": ":sparkles: (feat-2) add another primary feature", "scipy": "ENH: feat-2: add another primary feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_feb2_c1_feat, ], }, }, ], }, }, 
] ) # Merge in the successful revision release and create an alpha release new_version = Version.parse("1.1.0-alpha.1", tag_format=new_version.tag_format) merge_def_type_placeholder = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_2_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_feb2_merge2dev := "feat_branch_2_merge_2_dev"), "conventional": ( merge_msg := format_merge_commit_msg_git( branch_name=FEAT_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, merge_def_type_placeholder, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_feb2_merge2dev, ], }, }, ], }, }, ] ) # Merge in the successful alpha release and create a beta release new_version = Version.parse("1.1.0-beta.1", tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": BETA_BRANCH_NAME}, }, { **(merge_dev_into_beta_4 := next(merge_dev_into_beta_gen)), }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ merge_dev_into_beta_4["details"]["commit_def"][ "cid" ], ], }, }, ], }, }, ] ) # officially release the sucessful release candidate to 
production new_version = Version.parse("1.1.0", tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **(merge_beta_into_main_2 := next(merge_beta_into_main_gen)), }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ merge_beta_into_main_2["details"]["commit_def"][ "cid" ], ], }, }, ], }, }, ] ) return repo_construction_steps return _get_repo_from_definition @pytest.fixture(scope="session") def build_git_flow_repo_w_4_release_channels( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_git_flow_repo_w_4_release_channels: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_git_flow_repo_w_4_release_channels: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = ( get_repo_definition_4_git_flow_repo_w_4_release_channels( commit_type=commit_type, ) ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_git_flow_repo_w_4_release_channels, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache 
the built directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def repo_w_git_flow_w_beta_alpha_rev_prereleases_n_conventional_commits( build_git_flow_repo_w_4_release_channels: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = ( repo_w_git_flow_w_beta_alpha_rev_prereleases_n_conventional_commits.__name__ ) commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_git_flow_repo_w_4_release_channels( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_git_flow_w_beta_alpha_rev_prereleases_n_emoji_commits( build_git_flow_repo_w_4_release_channels: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_git_flow_w_beta_alpha_rev_prereleases_n_emoji_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_git_flow_repo_w_4_release_channels( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_git_flow_w_beta_alpha_rev_prereleases_n_scipy_commits( build_git_flow_repo_w_4_release_channels: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_git_flow_w_beta_alpha_rev_prereleases_n_scipy_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_git_flow_repo_w_4_release_channels( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": 
example_project_git_repo(), } python-semantic-release-10.4.1/tests/fixtures/repos/github_flow/000077500000000000000000000000001506116242600250145ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/fixtures/repos/github_flow/__init__.py000066400000000000000000000003511506116242600271240ustar00rootroot00000000000000from tests.fixtures.repos.github_flow.repo_w_default_release import * from tests.fixtures.repos.github_flow.repo_w_default_release_w_branch_update_merge import * from tests.fixtures.repos.github_flow.repo_w_release_channels import * python-semantic-release-10.4.1/tests/fixtures/repos/github_flow/repo_w_default_release.py000066400000000000000000000525461506116242600321010ustar00rootroot00000000000000from __future__ import annotations from datetime import timedelta from itertools import count from pathlib import Path from typing import TYPE_CHECKING, cast import pytest from semantic_release.cli.config import ChangelogOutputFormat from semantic_release.version.version import Version import tests.conftest import tests.const import tests.util from tests.const import ( DEFAULT_BRANCH_NAME, EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Any, Sequence from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.conventional.parser import ( ConventionalCommitParser, ) from semantic_release.commit_parser.emoji import EmojiCommitParser from semantic_release.commit_parser.scipy import ScipyCommitParser from semantic_release.commit_parser.token import ParseResult from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, CommitSpec, ConvertCommitSpecsToCommitDefsFn, ConvertCommitSpecToCommitDefFn, ExProjectGitRepoFn, 
FormatGitHubSquashCommitMsgFn, GetRepoDefinitionFn, RepoActions, RepoActionWriteChangelogsDestFile, TomlSerializableTypes, ) FIX_BRANCH_1_NAME = "fix/patch-1" FEAT_BRANCH_1_NAME = "feat/feature-1" @pytest.fixture(scope="session") def deps_files_4_github_flow_repo_w_default_release_channel( deps_files_4_example_git_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_github_flow_repo_w_default_release_channel( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_github_flow_repo_w_default_release_channel: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files( deps_files_4_github_flow_repo_w_default_release_channel ) @pytest.fixture(scope="session") def get_repo_definition_4_github_flow_repo_w_default_release_channel( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, convert_commit_spec_to_commit_def: ConvertCommitSpecToCommitDefFn, format_squash_commit_msg_github: FormatGitHubSquashCommitMsgFn, changelog_md_file: Path, changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, default_conventional_parser: ConventionalCommitParser, default_emoji_parser: EmojiCommitParser, default_scipy_parser: ScipyCommitParser, default_tag_format_str: str, ) -> GetRepoDefinitionFn: """ Builds a repository with the GitHub Flow branching strategy and a squash commit merging strategy for a single release channel on the default branch. 
""" parser_classes: dict[CommitConvention, CommitParser[Any, Any]] = { "conventional": default_conventional_parser, "emoji": default_emoji_parser, "scipy": default_scipy_parser, } def _get_repo_from_definition( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = True, ignore_merge_commits: bool = True, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) pr_num_gen = (i for i in count(start=2, step=1)) commit_parser = cast( "CommitParser[ParseResult, ParserOptions]", parser_classes[commit_type], ) changelog_file_definitions: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, "mask_initial_release": mask_initial_release, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, "mask_initial_release": mask_initial_release, }, ] repo_construction_steps: list[RepoActions] = [] repo_construction_steps.append( { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "tag_format_str": tag_format_str or default_tag_format_str, "mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.main": { "match": r"^(main|master)$", "prerelease": False, }, "tool.semantic_release.allow_zero_version": False, "tool.semantic_release.commit_parser_options.parse_squash_commits": True, "tool.semantic_release.commit_parser_options.ignore_merge_commits": ignore_merge_commits, **(extra_configs or {}), }, }, } ) new_version = Version.parse( "1.0.0", tag_format=(tag_format_str or default_tag_format_str) ) 
repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_db_c1_initial := "db_c1_initial_commit" ), "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, { "cid": (cid_db_c2_feat := "db_c2_feat"), "conventional": "feat: add new feature", "emoji": ":sparkles: add new feature", "scipy": "ENH: add new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [cid_db_c1_initial, cid_db_c2_feat], }, }, ], }, }, ] ) fix_branch_1_commits: Sequence[CommitSpec] = [ { "cid": "fix_branch_c1", "conventional": "fix(cli): add missing text\n\nResolves: #123\n", "emoji": ":bug: add missing text\n\nResolves: #123\n", "scipy": "MAINT: add missing text\n\nResolves: #123\n", "datetime": next(commit_timestamp_gen), }, ] repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FIX_BRANCH_1_NAME, "start_branch": DEFAULT_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { **commit, "include_in_changelog": False, } for commit in fix_branch_1_commits ], commit_type, parser=commit_parser, ), }, }, ] ) # simulate separate work by another person at same time as the fix branch feat_branch_1_commits: Sequence[CommitSpec] = [ { "cid": "feat_branch_c1", "conventional": "feat(cli): add cli interface", "emoji": ":sparkles: add cli interface", 
"scipy": "ENH: add cli interface", "datetime": next(commit_timestamp_gen), }, { "cid": "feat_branch_c2", "conventional": "test(cli): add cli tests", "emoji": ":checkmark: add cli tests", "scipy": "TST: add cli tests", "datetime": next(commit_timestamp_gen), }, { "cid": "feat_branch_c3", "conventional": "docs(cli): add cli documentation", "emoji": ":memo: add cli documentation", "scipy": "DOC: add cli documentation", "datetime": next(commit_timestamp_gen), }, ] repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_1_NAME, "start_branch": DEFAULT_BRANCH_NAME, }, }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { **commit, "include_in_changelog": False, } for commit in feat_branch_1_commits ], commit_type, parser=commit_parser, ) }, }, ] ) new_version = Version.parse("1.0.1", tag_format=new_version.tag_format) all_commit_types: list[CommitConvention] = ["conventional", "emoji", "scipy"] fix_branch_pr_number = next(pr_num_gen) cid_fix_branch_squash_base = "fix_branch_1_squash" cid_fix_branch_squash_gen = ( f"{cid_fix_branch_squash_base}-{i}" for i in count(start=1) ) fix_branch_squash_commit_spec: CommitSpec = { **{ # type: ignore[typeddict-item] cmt_type: format_squash_commit_msg_github( # Use the primary commit message as the PR title pr_title=fix_branch_1_commits[0][cmt_type], pr_number=fix_branch_pr_number, # No squashed commits since there is only one commit squashed_commits=[ cmt[commit_type] for cmt in fix_branch_1_commits[1:] ], ) for cmt_type in all_commit_types }, "cid": cid_fix_branch_squash_base, "datetime": next(commit_timestamp_gen), "include_in_changelog": True, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_SQUASH, "details": { "branch": FIX_BRANCH_1_NAME, "strategy_option": "theirs", "commit_def": 
convert_commit_spec_to_commit_def( fix_branch_squash_commit_spec, commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ next(cid_fix_branch_squash_gen) for _ in range(len(fix_branch_1_commits)) ], }, }, ], }, }, ] ) feat_branch_pr_number = next(pr_num_gen) cid_feat_branch_squash_base = "feat_branch_1_squash" cid_feat_branch_squash_gen = ( f"{cid_feat_branch_squash_base}-{i}" for i in count(start=1) ) feat_branch_squash_commit_spec: CommitSpec = { **{ # type: ignore[typeddict-item] cmt_type: format_squash_commit_msg_github( # Use the primary commit message as the PR title pr_title=feat_branch_1_commits[0][cmt_type], pr_number=feat_branch_pr_number, squashed_commits=[ cmt[commit_type] for cmt in feat_branch_1_commits ], ) for cmt_type in all_commit_types }, "cid": cid_feat_branch_squash_base, "datetime": next(commit_timestamp_gen), "include_in_changelog": True, } new_version = Version.parse("1.1.0", tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_SQUASH, "details": { "branch": FEAT_BRANCH_1_NAME, "strategy_option": "theirs", "commit_def": convert_commit_spec_to_commit_def( feat_branch_squash_commit_spec, commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ next(cid_feat_branch_squash_gen) for _ in range(len(feat_branch_1_commits) + 1) ], }, }, ], }, }, ] ) return repo_construction_steps return 
_get_repo_from_definition @pytest.fixture(scope="session") def build_repo_w_github_flow_w_default_release_channel( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_github_flow_repo_w_default_release_channel: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_github_flow_repo_w_default_release_channel: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = ( get_repo_definition_4_github_flow_repo_w_default_release_channel( commit_type=commit_type, ) ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_github_flow_repo_w_default_release_channel, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def repo_w_github_flow_w_default_release_channel_conventional_commits( build_repo_w_github_flow_w_default_release_channel: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = ( repo_w_github_flow_w_default_release_channel_conventional_commits.__name__ ) commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_repo_w_github_flow_w_default_release_channel( 
repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_github_flow_w_default_release_channel_emoji_commits( build_repo_w_github_flow_w_default_release_channel: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_github_flow_w_default_release_channel_emoji_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_repo_w_github_flow_w_default_release_channel( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_github_flow_w_default_release_channel_scipy_commits( build_repo_w_github_flow_w_default_release_channel: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_github_flow_w_default_release_channel_scipy_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_repo_w_github_flow_w_default_release_channel( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } repo_w_default_release_w_branch_update_merge.py000066400000000000000000000542351506116242600364030ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/fixtures/repos/github_flowfrom __future__ import annotations from datetime import timedelta from itertools import count from pathlib import Path from typing import TYPE_CHECKING, cast import pytest from semantic_release.cli.config import ChangelogOutputFormat from semantic_release.version.version import Version import tests.conftest import tests.const import tests.util from tests.const import ( DEFAULT_BRANCH_NAME, 
EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Any, Sequence from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.conventional.parser import ( ConventionalCommitParser, ) from semantic_release.commit_parser.emoji import EmojiCommitParser from semantic_release.commit_parser.scipy import ScipyCommitParser from semantic_release.commit_parser.token import ParseResult from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ( ExProjectDir, ) from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, ConvertCommitSpecsToCommitDefsFn, ConvertCommitSpecToCommitDefFn, ExProjectGitRepoFn, FormatGitHubMergeCommitMsgFn, FormatGitMergeCommitMsgFn, GetRepoDefinitionFn, RepoActionGitMerge, RepoActionGitMergeDetails, RepoActions, RepoActionWriteChangelogsDestFile, TomlSerializableTypes, ) FEAT_BRANCH_1_NAME = "feat/feature-1" FEAT_BRANCH_2_NAME = "feat/feature-2" @pytest.fixture(scope="session") def deps_files_4_repo_w_default_release_n_branch_update_merge( deps_files_4_example_git_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_repo_w_default_release_n_branch_update_merge( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_repo_w_default_release_n_branch_update_merge: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files( deps_files_4_repo_w_default_release_n_branch_update_merge ) @pytest.fixture(scope="session") def 
get_repo_definition_4_github_flow_repo_w_default_release_n_branch_update_merge( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, convert_commit_spec_to_commit_def: ConvertCommitSpecToCommitDefFn, format_merge_commit_msg_git: FormatGitMergeCommitMsgFn, format_merge_commit_msg_github: FormatGitHubMergeCommitMsgFn, changelog_md_file: Path, changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, default_conventional_parser: ConventionalCommitParser, default_emoji_parser: EmojiCommitParser, default_scipy_parser: ScipyCommitParser, default_tag_format_str: str, ) -> GetRepoDefinitionFn: """ This fixture provides a function that builds a repository definition for a trunk-based development where a release in the default branch is made in parallel to a work in a feature branch, feature branch is updated with the latest changes from the default branch and them merged back into the default branch with a release. It is the minimal reproducible example of the issue [#1252](https://github.com/python-semantic-release/python-semantic-release/issues/1252). 
""" parser_classes: dict[CommitConvention, CommitParser[Any, Any]] = { "conventional": default_conventional_parser, "emoji": default_emoji_parser, "scipy": default_scipy_parser, } def _get_repo_from_definition( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = True, ignore_merge_commits: bool = True, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() pr_num_gen = (i for i in count(start=2, step=1)) commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) commit_parser = cast( "CommitParser[ParseResult, ParserOptions]", parser_classes[commit_type], ) changelog_file_definitions: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, "mask_initial_release": mask_initial_release, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, "mask_initial_release": mask_initial_release, }, ] repo_construction_steps: list[RepoActions] = [] repo_construction_steps.append( { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "tag_format_str": tag_format_str or default_tag_format_str, "mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.main": { "match": r"^(main|master)$", "prerelease": False, }, "tool.semantic_release.allow_zero_version": False, "tool.semantic_release.commit_parser_options.ignore_merge_commits": ignore_merge_commits, **(extra_configs or {}), }, }, } ) # Make initial release new_version = Version.parse( "1.0.0", tag_format=(tag_format_str or default_tag_format_str) ) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, 
"details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_db_c1_initial := "db_c1_initial_commit" ), "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, { "cid": (cid_db_c2_feat := "db_c2_feat"), "conventional": "feat: add new feature", "emoji": ":sparkles: add new feature", "scipy": "ENH: add new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [cid_db_c1_initial, cid_db_c2_feat], }, }, ], }, }, ] ) # Create a feature branch & make a commit (separate developer, slower activity) repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_1_NAME, "start_branch": DEFAULT_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_feb1_c1_feat := "feat_branch_1_c1_feat" ), "conventional": "feat: add new feature in the feature branch", "emoji": ":sparkles: add new feature in the feature branch", "scipy": "ENH: add new feature in the feature branch", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, ] ) # Create a 2nd feature branch & make a commit (separate developer, faster activity) repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_2_NAME, "start_branch": DEFAULT_BRANCH_NAME, } }, }, { "action": 
RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_feb2_c1_feat := "feat_branch_2_c1_feat" ), "conventional": "feat: add a faster feature", "emoji": ":sparkles: add a faster feature", "scipy": "ENH: add a faster feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, ] ) # Merge feature branch 2 into default branch and release (faster activity is complete) new_version = Version.parse("1.1.0", tag_format=new_version.tag_format) merge_def_type_placeholder: RepoActionGitMerge[RepoActionGitMergeDetails] = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_2_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_feb2_merge := "feat_branch_2_merge"), "conventional": ( merge_msg := format_merge_commit_msg_github( pr_number=next(pr_num_gen), branch_name=FEAT_BRANCH_2_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, merge_def_type_placeholder, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [cid_feb2_c1_feat, cid_feb2_merge], }, }, ], }, }, ] ) merge_def_type_placeholder = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": DEFAULT_BRANCH_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_feb1_update_merge := "feat_branch_1_update_merge"), "conventional": ( merge_msg := format_merge_commit_msg_git( 
branch_name=DEFAULT_BRANCH_NAME, tgt_branch_name=FEAT_BRANCH_1_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } # Merge default branch into the feature branch to keep it up to date repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": FEAT_BRANCH_1_NAME}, }, merge_def_type_placeholder, ] ) # Merge the feature branch into the default branch and make a release new_version = Version.parse("1.2.0", tag_format=new_version.tag_format) merge_def_type_placeholder = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_1_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_feb1_merge := "feat_branch_1_release_merge"), "conventional": ( merge_msg := format_merge_commit_msg_github( pr_number=next(pr_num_gen), branch_name=FEAT_BRANCH_1_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, merge_def_type_placeholder, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_feb1_c1_feat, cid_feb1_update_merge, cid_feb1_merge, ], }, }, ], }, }, ] ) return repo_construction_steps return _get_repo_from_definition @pytest.fixture(scope="session") def build_github_flow_repo_w_default_release_n_branch_update_merge( build_repo_from_definition: BuildRepoFromDefinitionFn, 
get_repo_definition_4_github_flow_repo_w_default_release_n_branch_update_merge: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_repo_w_default_release_n_branch_update_merge: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = get_repo_definition_4_github_flow_repo_w_default_release_n_branch_update_merge( commit_type=commit_type, ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_repo_w_default_release_n_branch_update_merge, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def repo_w_github_flow_w_default_release_n_branch_update_merge_conventional_commits( build_github_flow_repo_w_default_release_n_branch_update_merge: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_github_flow_w_default_release_n_branch_update_merge_conventional_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_github_flow_repo_w_default_release_n_branch_update_merge( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": 
example_project_git_repo(), } @pytest.fixture def repo_w_github_flow_w_default_release_n_branch_update_merge_emoji_commits( build_github_flow_repo_w_default_release_n_branch_update_merge: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_github_flow_w_default_release_n_branch_update_merge_emoji_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_github_flow_repo_w_default_release_n_branch_update_merge( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_github_flow_w_default_release_n_branch_update_merge_scipy_commits( build_github_flow_repo_w_default_release_n_branch_update_merge: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_github_flow_w_default_release_n_branch_update_merge_scipy_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_github_flow_repo_w_default_release_n_branch_update_merge( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } python-semantic-release-10.4.1/tests/fixtures/repos/github_flow/repo_w_release_channels.py000066400000000000000000000574421506116242600322500ustar00rootroot00000000000000from __future__ import annotations from datetime import timedelta from itertools import count from pathlib import Path from typing import TYPE_CHECKING, cast import pytest from semantic_release.cli.config import ChangelogOutputFormat from semantic_release.version.version import Version import tests.conftest import tests.const import tests.util from tests.const import ( DEFAULT_BRANCH_NAME, EXAMPLE_HVCS_DOMAIN, 
INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Any, Sequence from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.conventional.parser import ( ConventionalCommitParser, ) from semantic_release.commit_parser.emoji import EmojiCommitParser from semantic_release.commit_parser.scipy import ScipyCommitParser from semantic_release.commit_parser.token import ParseResult from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, ConvertCommitSpecsToCommitDefsFn, ConvertCommitSpecToCommitDefFn, ExProjectGitRepoFn, FormatGitHubMergeCommitMsgFn, GetRepoDefinitionFn, RepoActionGitMerge, RepoActionGitMergeDetails, RepoActions, RepoActionWriteChangelogsDestFile, TomlSerializableTypes, ) FIX_BRANCH_1_NAME = "fix/patch-1" FEAT_BRANCH_1_NAME = "feat/feature-1" FEAT_BRANCH_2_NAME = "feat/feature-2" @pytest.fixture(scope="session") def deps_files_4_github_flow_repo_w_feature_release_channel( deps_files_4_example_git_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_github_flow_repo_w_feature_release_channel( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_github_flow_repo_w_feature_release_channel: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files( deps_files_4_github_flow_repo_w_feature_release_channel ) @pytest.fixture(scope="session") def 
get_repo_definition_4_github_flow_repo_w_feature_release_channel( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, convert_commit_spec_to_commit_def: ConvertCommitSpecToCommitDefFn, format_merge_commit_msg_github: FormatGitHubMergeCommitMsgFn, changelog_md_file: Path, changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, default_conventional_parser: ConventionalCommitParser, default_emoji_parser: EmojiCommitParser, default_scipy_parser: ScipyCommitParser, default_tag_format_str: str, ) -> GetRepoDefinitionFn: """ Builds a repository with the GitHub Flow branching strategy using merge commits for alpha feature releases and official releases on the default branch. """ parser_classes: dict[CommitConvention, CommitParser[Any, Any]] = { "conventional": default_conventional_parser, "emoji": default_emoji_parser, "scipy": default_scipy_parser, } def _get_repo_from_definition( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = True, ignore_merge_commits: bool = True, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) pr_num_gen = (i for i in count(start=2, step=1)) commit_parser = cast( "CommitParser[ParseResult, ParserOptions]", parser_classes[commit_type], ) changelog_file_definitions: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, "mask_initial_release": mask_initial_release, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, "mask_initial_release": mask_initial_release, }, ] repo_construction_steps: list[RepoActions] = [] repo_construction_steps.append( { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": 
commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "tag_format_str": tag_format_str or default_tag_format_str, "mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.main": { "match": r"^(main|master)$", "prerelease": False, }, # branch "feat/" & "fix/" has prerelease suffix of "alpha" "tool.semantic_release.branches.alpha-release": { "match": r"^(feat|fix)/.+", "prerelease": True, "prerelease_token": "alpha", }, "tool.semantic_release.commit_parser_options.ignore_merge_commits": ignore_merge_commits, "tool.semantic_release.allow_zero_version": False, **(extra_configs or {}), }, }, } ) # Make initial release new_version = Version.parse( "1.0.0", tag_format=(tag_format_str or default_tag_format_str) ) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_db_initial := "db_c1_initial_commit"), "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, { "cid": (cid_db_c2_feat := "db_c2_feat"), "conventional": "feat: add new feature", "emoji": ":sparkles: add new feature", "scipy": "ENH: add new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [cid_db_initial, cid_db_c2_feat], }, }, ], }, }, ] ) # Make a fix and release it as an alpha release new_version = Version.parse("1.0.1-alpha.1", tag_format=new_version.tag_format) 
repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FIX_BRANCH_1_NAME, "start_branch": DEFAULT_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_fib1_c1_fix := "fix_branch_c1_fix"), "conventional": "fix: correct some text\n\nResolves: #123", "emoji": ":bug: correct some text\n\nResolves: #123", "scipy": "MAINT: correct some text\n\nResolves: #123", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [cid_fib1_c1_fix], }, }, ], }, }, ] ) # Update the fix and release another alpha release new_version = Version.parse("1.0.1-alpha.2", tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_fib1_c2_fix := "fix_branch_1_c2_fix"), "conventional": "fix: adjust text to resolve", "emoji": ":bug: adjust text to resolve", "scipy": "MAINT: adjust text to resolve", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [cid_fib1_c2_fix], }, }, ], }, }, ] ) # Merge the fix branch into the default branch and formally release it new_version = 
Version.parse("1.0.1", tag_format=new_version.tag_format) merge_def_type_placeholder: RepoActionGitMerge[RepoActionGitMergeDetails] = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FIX_BRANCH_1_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_fib1_merge := "fix_branch_1_merge"), "conventional": ( merge_msg := format_merge_commit_msg_github( pr_number=next(pr_num_gen), branch_name=FIX_BRANCH_1_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, merge_def_type_placeholder, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [cid_fib1_merge], }, }, ], }, }, ] ) # Make a feature branch and release it as an alpha release new_version = Version.parse("1.1.0-alpha.1", tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_1_NAME, "start_branch": DEFAULT_BRANCH_NAME, }, }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": ( cid_feb1_c1_feat := "feat_branch_1_c1_feat" ), "conventional": "feat(cli): add cli interface", "emoji": ":sparkles: add cli interface", "scipy": "ENH: add cli interface", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ) }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": 
next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [cid_feb1_c1_feat], }, }, ], }, }, ] ) # Merge the feature branch and officially release it new_version = Version.parse("1.1.0", tag_format=new_version.tag_format) merge_def_type_placeholder = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_1_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "cid": (cid_feb1_merge := "feat_branch_1_merge"), "conventional": ( merge_msg := format_merge_commit_msg_github( pr_number=next(pr_num_gen), branch_name=FEAT_BRANCH_1_NAME, ) ), "emoji": merge_msg, "scipy": merge_msg, "datetime": next(commit_timestamp_gen), "include_in_changelog": not ignore_merge_commits, }, commit_type, parser=commit_parser, ), }, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, merge_def_type_placeholder, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [cid_feb1_merge], }, }, ], }, }, ] ) return repo_construction_steps return _get_repo_from_definition @pytest.fixture(scope="session") def build_repo_w_github_flow_w_feature_release_channel( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_github_flow_repo_w_feature_release_channel: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_github_flow_repo_w_feature_release_channel: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: 
def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = ( get_repo_definition_4_github_flow_repo_w_feature_release_channel( commit_type=commit_type, ) ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_github_flow_repo_w_feature_release_channel, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def repo_w_github_flow_w_feature_release_channel_conventional_commits( build_repo_w_github_flow_w_feature_release_channel: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = ( repo_w_github_flow_w_feature_release_channel_conventional_commits.__name__ ) commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_repo_w_github_flow_w_feature_release_channel( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_github_flow_w_feature_release_channel_emoji_commits( build_repo_w_github_flow_w_feature_release_channel: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_github_flow_w_feature_release_channel_emoji_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] 
return { "definition": build_repo_w_github_flow_w_feature_release_channel( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_github_flow_w_feature_release_channel_scipy_commits( build_repo_w_github_flow_w_feature_release_channel: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_github_flow_w_feature_release_channel_scipy_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_repo_w_github_flow_w_feature_release_channel( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } python-semantic-release-10.4.1/tests/fixtures/repos/repo_initial_commit.py000066400000000000000000000206321506116242600271060ustar00rootroot00000000000000from __future__ import annotations from pathlib import Path from typing import TYPE_CHECKING, cast import pytest from semantic_release.cli.config import ChangelogOutputFormat import tests.conftest import tests.const import tests.util from tests.const import ( EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Any, Sequence from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.conventional.parser import ( ConventionalCommitParser, ) from semantic_release.commit_parser.emoji import EmojiCommitParser from semantic_release.commit_parser.scipy import ScipyCommitParser from semantic_release.commit_parser.token import ParseResult from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, 
BuiltRepoResult, CommitConvention, ConvertCommitSpecsToCommitDefsFn, ExProjectGitRepoFn, GetRepoDefinitionFn, RepoActions, TomlSerializableTypes, ) @pytest.fixture(scope="session") def deps_files_4_repo_initial_commit( deps_files_4_example_git_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_repo_initial_commit( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_repo_initial_commit: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files(deps_files_4_repo_initial_commit) @pytest.fixture(scope="session") def get_repo_definition_4_repo_w_initial_commit( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, changelog_md_file: Path, changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, default_conventional_parser: ConventionalCommitParser, default_emoji_parser: EmojiCommitParser, default_scipy_parser: ScipyCommitParser, ) -> GetRepoDefinitionFn: parser_classes: dict[CommitConvention, CommitParser[Any, Any]] = { "conventional": default_conventional_parser, "emoji": default_emoji_parser, "scipy": default_scipy_parser, } def _get_repo_from_definition( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = True, ignore_merge_commits: bool = True, ) -> Sequence[RepoActions]: repo_construction_steps: list[RepoActions] = [] repo_construction_steps.extend( [ { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, 
"tag_format_str": tag_format_str, "mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.main": { "match": r"^(main|master)$", "prerelease": False, }, "tool.semantic_release.commit_parser_options.ignore_merge_commits": ignore_merge_commits, **(extra_configs or {}), }, }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": "initial_commit", "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": stable_now_date().isoformat( timespec="seconds" ), "include_in_changelog": bool( commit_type == "emoji" ), }, ], commit_type, parser=cast( "CommitParser[ParseResult, ParserOptions]", parser_classes[commit_type], ), ), }, }, { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": "Unreleased", "dest_files": [ { "path": changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, "mask_initial_release": mask_initial_release, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, "mask_initial_release": mask_initial_release, }, ], "commit_ids": [ "initial_commit", ], }, }, ] ) return repo_construction_steps return _get_repo_from_definition @pytest.fixture(scope="session") def build_repo_w_initial_commit( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_repo_w_initial_commit: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_repo_initial_commit: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = get_repo_definition_4_repo_w_initial_commit( commit_type=commit_type, ) return build_repo_from_definition(cached_repo_path, 
repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_repo_initial_commit, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def repo_w_initial_commit( build_repo_w_initial_commit: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_initial_commit.__name__ return { "definition": build_repo_w_initial_commit( repo_name=repo_name, commit_type="conventional", # not used but required dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } python-semantic-release-10.4.1/tests/fixtures/repos/trunk_based_dev/000077500000000000000000000000001506116242600256425ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/fixtures/repos/trunk_based_dev/__init__.py000066400000000000000000000005631506116242600277570ustar00rootroot00000000000000from tests.fixtures.repos.trunk_based_dev.repo_w_dual_version_support import * from tests.fixtures.repos.trunk_based_dev.repo_w_dual_version_support_w_prereleases import * from tests.fixtures.repos.trunk_based_dev.repo_w_no_tags import * from tests.fixtures.repos.trunk_based_dev.repo_w_prereleases import * from tests.fixtures.repos.trunk_based_dev.repo_w_tags import * python-semantic-release-10.4.1/tests/fixtures/repos/trunk_based_dev/repo_w_dual_version_support.py000066400000000000000000000474211506116242600340650ustar00rootroot00000000000000from __future__ import annotations from datetime 
import timedelta from itertools import count from pathlib import Path from typing import TYPE_CHECKING, cast import pytest from semantic_release.cli.config import ChangelogOutputFormat from semantic_release.version.version import Version import tests.conftest import tests.const import tests.util from tests.const import ( DEFAULT_BRANCH_NAME, EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Any, Sequence from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.conventional.parser import ( ConventionalCommitParser, ) from semantic_release.commit_parser.emoji import EmojiCommitParser from semantic_release.commit_parser.scipy import ScipyCommitParser from semantic_release.commit_parser.token import ParseResult from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ( ExProjectDir, ) from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, ConvertCommitSpecsToCommitDefsFn, ExProjectGitRepoFn, GetRepoDefinitionFn, RepoActions, RepoActionWriteChangelogsDestFile, TomlSerializableTypes, ) MAINTENANCE_BRANCH_NAME = "v1.x" @pytest.fixture(scope="session") def deps_files_4_repo_w_dual_version_support( deps_files_4_example_git_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_repo_w_dual_version_support( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_repo_w_dual_version_support: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return 
get_md5_for_set_of_files(deps_files_4_repo_w_dual_version_support) @pytest.fixture(scope="session") def get_repo_definition_4_trunk_only_repo_w_dual_version_support( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, changelog_md_file: Path, changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, default_conventional_parser: ConventionalCommitParser, default_emoji_parser: EmojiCommitParser, default_scipy_parser: ScipyCommitParser, default_tag_format_str: str, ) -> GetRepoDefinitionFn: """ Builds a repository with trunk-only committing (no-branching) strategy with only official releases with latest and previous version support. """ parser_classes: dict[CommitConvention, CommitParser[Any, Any]] = { "conventional": default_conventional_parser, "emoji": default_emoji_parser, "scipy": default_scipy_parser, } def _get_repo_from_definition( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = True, ignore_merge_commits: bool = True, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) commit_parser = cast( "CommitParser[ParseResult, ParserOptions]", parser_classes[commit_type], ) changelog_file_definitions: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, "mask_initial_release": mask_initial_release, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, "mask_initial_release": mask_initial_release, }, ] repo_construction_steps: list[RepoActions] = [] repo_construction_steps.append( { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, 
"tag_format_str": tag_format_str or default_tag_format_str, "mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.latest": { "match": r"^(main|master)$", "prerelease": False, }, "tool.semantic_release.branches.maintenance": { "match": r"^v1\.x$", "prerelease": False, }, "tool.semantic_release.commit_parser_options.ignore_merge_commits": ignore_merge_commits, "tool.semantic_release.allow_zero_version": False, **(extra_configs or {}), }, }, } ) # Make initial release new_version = Version.parse( "1.0.0", tag_format=tag_format_str or default_tag_format_str ) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_c1_initial := "c1_initial_commit"), "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, { "cid": (cid_c2_feat := "c2-feat"), "conventional": "feat: add new feature", "emoji": ":sparkles: add new feature", "scipy": "ENH: add new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [cid_c1_initial, cid_c2_feat], }, }, ], }, }, ] ) # Make a fix and officially release it new_version = Version.parse("1.0.1", tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_c3_fix := "c3-fix"), "conventional": "fix: correct some text", 
"emoji": ":bug: correct some text", "scipy": "MAINT: correct some text", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [cid_c3_fix], }, }, ], }, }, ] ) # Make a breaking change and officially release it new_version = Version.parse("2.0.0", tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": MAINTENANCE_BRANCH_NAME, "start_branch": DEFAULT_BRANCH_NAME, } }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_c4_break_feat := "c4-break-feat"), "conventional": str.join( "\n\n", [ "feat: add revolutionary feature", "BREAKING CHANGE: this is a breaking change", ], ), "emoji": str.join( "\n\n", [ ":boom: add revolutionary feature", "This change is a breaking change", ], ), "scipy": str.join( "\n\n", [ "API: add revolutionary feature", "This is a breaking change", ], ), "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [cid_c4_break_feat], }, }, ], }, }, ] ) # Fix a critical bug in the previous version and officially release it new_version = 
Version.parse("1.0.2", tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": MAINTENANCE_BRANCH_NAME}, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_c5_v1_fix := "c5-fix"), "conventional": "fix: correct critical bug\n\nResolves: #123\n", "emoji": ":bug: correct critical bug\n\nResolves: #123\n", "scipy": "MAINT: correct critical bug\n\nResolves: #123\n", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "max_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [cid_c5_v1_fix], }, }, ], }, }, ] ) # Return to the latest release variant repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, # TODO: return and make another release on the latest version # currently test variant of the changelog generator can't support this ] ) return repo_construction_steps return _get_repo_from_definition @pytest.fixture(scope="session") def build_trunk_only_repo_w_dual_version_support( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_trunk_only_repo_w_dual_version_support: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_repo_w_dual_version_support: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = ( 
get_repo_definition_4_trunk_only_repo_w_dual_version_support( commit_type=commit_type, ) ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_repo_w_dual_version_support, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def repo_w_trunk_only_dual_version_spt_conventional_commits( build_trunk_only_repo_w_dual_version_support: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_trunk_only_dual_version_spt_conventional_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_dual_version_support( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_trunk_only_dual_version_spt_emoji_commits( build_trunk_only_repo_w_dual_version_support: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_trunk_only_dual_version_spt_emoji_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_dual_version_support( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": 
example_project_git_repo(), } @pytest.fixture def repo_w_trunk_only_dual_version_spt_scipy_commits( build_trunk_only_repo_w_dual_version_support: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_trunk_only_dual_version_spt_scipy_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_dual_version_support( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } repo_w_dual_version_support_w_prereleases.py000066400000000000000000000606351506116242600367300ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/fixtures/repos/trunk_based_devfrom __future__ import annotations from datetime import timedelta from itertools import count from pathlib import Path from typing import TYPE_CHECKING, cast import pytest from semantic_release.cli.config import ChangelogOutputFormat from semantic_release.version.version import Version import tests.conftest import tests.const import tests.util from tests.const import ( DEFAULT_BRANCH_NAME, EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Any, Sequence from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.conventional.parser import ( ConventionalCommitParser, ) from semantic_release.commit_parser.emoji import EmojiCommitParser from semantic_release.commit_parser.scipy import ScipyCommitParser from semantic_release.commit_parser.token import ParseResult from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ( ExProjectDir, ) from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, 
CommitConvention, ConvertCommitSpecsToCommitDefsFn, ExProjectGitRepoFn, GetRepoDefinitionFn, RepoActions, RepoActionWriteChangelogsDestFile, TomlSerializableTypes, ) MAINTENANCE_BRANCH_NAME = "v1.x" @pytest.fixture(scope="session") def deps_files_4_repo_w_dual_version_spt_w_prereleases( deps_files_4_example_git_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_repo_w_dual_version_spt_w_prereleases( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_repo_w_dual_version_spt_w_prereleases: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files(deps_files_4_repo_w_dual_version_spt_w_prereleases) @pytest.fixture(scope="session") def get_repo_definition_4_trunk_only_repo_w_dual_version_spt_w_prereleases( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, changelog_md_file: Path, changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, default_conventional_parser: ConventionalCommitParser, default_emoji_parser: EmojiCommitParser, default_scipy_parser: ScipyCommitParser, default_tag_format_str: str, ) -> GetRepoDefinitionFn: """ Builds a repository with trunk-only committing (no-branching) strategy with only official releases with latest and previous version support. 
""" parser_classes: dict[CommitConvention, CommitParser[Any, Any]] = { "conventional": default_conventional_parser, "emoji": default_emoji_parser, "scipy": default_scipy_parser, } def _get_repo_from_definition( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = True, ignore_merge_commits: bool = True, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) commit_parser = cast( "CommitParser[ParseResult, ParserOptions]", parser_classes[commit_type], ) changelog_file_definitions: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, "mask_initial_release": mask_initial_release, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, "mask_initial_release": mask_initial_release, }, ] repo_construction_steps: list[RepoActions] = [] repo_construction_steps.append( { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "tag_format_str": tag_format_str or default_tag_format_str, "mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.latest": { "match": r"^(main|master)$", "prerelease": False, }, "tool.semantic_release.branches.maintenance": { "match": r"^v1\.x$", "prerelease": False, }, "tool.semantic_release.commit_parser_options.ignore_merge_commits": ignore_merge_commits, "tool.semantic_release.allow_zero_version": False, **(extra_configs or {}), }, }, } ) # Make initial release new_version = Version.parse( "1.0.0", tag_format=(tag_format_str or default_tag_format_str) ) repo_construction_steps.extend( 
[ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_c1_initial := "c1-initial_commit"), "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, { "cid": (cid_c2_feat := "c2-feat"), "conventional": "feat: add new feature", "emoji": ":sparkles: add new feature", "scipy": "ENH: add new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [cid_c1_initial, cid_c2_feat], }, }, ], }, }, ] ) # Make a fix and officially release it new_version = Version.parse("1.0.1", tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_c3_fix := "c3-fix"), "conventional": "fix: correct some text", "emoji": ":bug: correct some text", "scipy": "MAINT: correct some text", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [cid_c3_fix], }, }, ], }, }, ] ) # Make a breaking change and officially release it new_version = Version.parse("2.0.0", tag_format=new_version.tag_format) 
repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": MAINTENANCE_BRANCH_NAME, "start_branch": DEFAULT_BRANCH_NAME, } }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_c4_bfeat := "c4-breaking-feat1"), "conventional": str.join( "\n\n", [ "feat: add revolutionary feature", "BREAKING CHANGE: this is a breaking change", ], ), "emoji": str.join( "\n\n", [ ":boom: add revolutionary feature", "This change is a breaking change", ], ), "scipy": str.join( "\n\n", [ "API: add revolutionary feature", "This is a breaking change", ], ), "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [cid_c4_bfeat], }, }, ], }, }, ] ) # Attempt to fix a critical bug in the previous version and release it as a prerelease version # This is based on https://github.com/python-semantic-release/python-semantic-release/issues/861 new_version = Version.parse("1.0.2-hotfix.1", tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": MAINTENANCE_BRANCH_NAME}, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_c5_v1_fix := "c5-fix2"), "conventional": "fix: correct critical bug\n\nResolves: #123\n", "emoji": ":bug: correct critical bug\n\nResolves: #123\n", "scipy": "MAINT: correct critical bug\n\nResolves: #123\n", "datetime": next(commit_timestamp_gen), 
"include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "max_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [cid_c5_v1_fix], }, }, ], }, }, ] ) # The Hotfix didn't work, so correct it and try again new_version = Version.parse("1.0.2-hotfix.2", tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_c6_v1_fix := "c6-fix3"), "conventional": "fix: resolve critical bug", "emoji": ":bug: resolve critical bug", "scipy": "MAINT: resolve critical bug", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "max_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [cid_c6_v1_fix], }, }, ], }, }, ] ) # It finally was resolved so release it officially new_version = Version.parse("1.0.2", tag_format=new_version.tag_format) repo_construction_steps.extend( [ # { # "action": RepoActionStep.MAKE_COMMITS, # "details": { # "commits": convert_commit_specs_to_commit_defs( # [ # { # "cid": (cid_c7_v1_docs := "c7-docs"), # "conventional": "docs: update documentation regarding critical bug", # "emoji": ":books: update documentation regarding critical bug", # "scipy": "DOC: update documentation regarding critical bug", # "datetime": next(commit_timestamp_gen), # "include_in_changelog": True, # }, # ], # 
commit_type, # parser=commit_parser, # ), # }, # }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "max_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_c5_v1_fix, cid_c6_v1_fix, # cid_c7_v1_docs, ], }, }, ], }, }, ] ) # Return to the latest release variant repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, # TODO: return and make another release on the latest version # currently test variant of the changelog generator can't support this ] ) return repo_construction_steps return _get_repo_from_definition @pytest.fixture(scope="session") def build_trunk_only_repo_w_dual_version_spt_w_prereleases( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_trunk_only_repo_w_dual_version_spt_w_prereleases: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_repo_w_dual_version_spt_w_prereleases: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = ( get_repo_definition_4_trunk_only_repo_w_dual_version_spt_w_prereleases( commit_type=commit_type, ) ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_repo_w_dual_version_spt_w_prereleases, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return 
_build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def repo_w_trunk_only_dual_version_spt_w_prereleases_conventional_commits( build_trunk_only_repo_w_dual_version_spt_w_prereleases: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = ( repo_w_trunk_only_dual_version_spt_w_prereleases_conventional_commits.__name__ ) commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_dual_version_spt_w_prereleases( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_trunk_only_dual_version_spt_w_prereleases_emoji_commits( build_trunk_only_repo_w_dual_version_spt_w_prereleases: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_trunk_only_dual_version_spt_w_prereleases_emoji_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_dual_version_spt_w_prereleases( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_trunk_only_dual_version_spt_w_prereleases_scipy_commits( build_trunk_only_repo_w_dual_version_spt_w_prereleases: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_trunk_only_dual_version_spt_w_prereleases_scipy_commits.__name__ commit_type: 
CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_dual_version_spt_w_prereleases( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } python-semantic-release-10.4.1/tests/fixtures/repos/trunk_based_dev/repo_w_no_tags.py000066400000000000000000000411121506116242600312200ustar00rootroot00000000000000from __future__ import annotations from datetime import timedelta from itertools import count from pathlib import Path from typing import TYPE_CHECKING, cast import pytest from semantic_release.cli.config import ChangelogOutputFormat import tests.conftest import tests.const import tests.util from tests.const import ( EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Any, Sequence from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.conventional.parser import ( ConventionalCommitParser, ) from semantic_release.commit_parser.emoji import EmojiCommitParser from semantic_release.commit_parser.scipy import ScipyCommitParser from semantic_release.commit_parser.token import ParseResult from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, ConvertCommitSpecsToCommitDefsFn, ExProjectGitRepoFn, GetRepoDefinitionFn, RepoActions, TomlSerializableTypes, ) @pytest.fixture(scope="session") def deps_files_4_repo_w_no_tags( deps_files_4_example_git_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures 
Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_repo_w_no_tags( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_repo_w_no_tags: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files(deps_files_4_repo_w_no_tags) @pytest.fixture(scope="session") def get_repo_definition_4_trunk_only_repo_w_no_tags( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, changelog_md_file: Path, changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, default_conventional_parser: ConventionalCommitParser, default_emoji_parser: EmojiCommitParser, default_scipy_parser: ScipyCommitParser, ) -> GetRepoDefinitionFn: """ Builds a repository with trunk-only committing (no-branching) strategy without any releases. """ parser_classes: dict[CommitConvention, CommitParser[Any, Any]] = { "conventional": default_conventional_parser, "emoji": default_emoji_parser, "scipy": default_scipy_parser, } def _get_repo_from_definition( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = True, ignore_merge_commits: bool = True, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) repo_construction_steps: list[RepoActions] = [] repo_construction_steps.extend( [ { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "tag_format_str": tag_format_str, "mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.main": { "match": r"^(main|master)$", "prerelease": False, }, 
"tool.semantic_release.commit_parser_options.ignore_merge_commits": ignore_merge_commits, **(extra_configs or {}), }, }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_c1_initial := "c1_initial_commit"), "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, { "cid": (cid_c2_feat1 := "c2-feat1"), "conventional": "feat: add new feature", "emoji": ":sparkles: add new feature", "scipy": "ENH: add new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, { "cid": (cid_c3_fix1 := "c3-fix1"), "conventional": "fix: correct some text", "emoji": ":bug: correct some text", "scipy": "MAINT: correct some text", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, { "cid": (cid_c4_fix2 := "c4-fix2"), "conventional": "fix: correct more text\n\nCloses: #123", "emoji": ":bug: correct more text\n\nCloses: #123", "scipy": "MAINT: correct more text\n\nCloses: #123", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=cast( "CommitParser[ParseResult, ParserOptions]", parser_classes[commit_type], ), ), }, }, { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": "Unreleased", "dest_files": [ { "path": changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, "mask_initial_release": mask_initial_release, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, "mask_initial_release": mask_initial_release, }, ], "commit_ids": [ cid_c1_initial, cid_c2_feat1, cid_c3_fix1, cid_c4_fix2, ], }, }, ] ) return repo_construction_steps return _get_repo_from_definition @pytest.fixture(scope="session") def build_trunk_only_repo_w_no_tags( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_trunk_only_repo_w_no_tags: 
GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_repo_w_no_tags: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = get_repo_definition_4_trunk_only_repo_w_no_tags( commit_type=commit_type, ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_repo_w_no_tags, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def repo_w_no_tags_conventional_commits_using_tag_format( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_trunk_only_repo_w_no_tags: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_repo_w_no_tags: str, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: """ Replicates repo with no tags, but with a tag format X{version} Follows tag format defined in python-semantic-release#1137 """ repo_name = repo_w_no_tags_conventional_commits_using_tag_format.__name__ commit_type: CommitConvention = ( repo_name.split("_commits", maxsplit=1)[0].split("_")[-1] # type: ignore[assignment] ) def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: 
repo_construction_steps = get_repo_definition_4_trunk_only_repo_w_no_tags( commit_type=commit_type, tag_format_str="X{version}", ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_repo_w_no_tags, build_repo_func=_build_repo, dest_dir=example_project_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return { "definition": cached_repo_data["build_definition"], "repo": example_project_git_repo(), } @pytest.fixture def repo_w_no_tags_conventional_commits_w_zero_version( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_trunk_only_repo_w_no_tags: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_repo_w_no_tags: str, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: """Replicates repo with no tags, but with allow_zero_version=True""" repo_name = repo_w_no_tags_conventional_commits_w_zero_version.__name__ commit_type: CommitConvention = ( repo_name.split("_commits", maxsplit=1)[0].split("_")[-1] # type: ignore[assignment] ) def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = get_repo_definition_4_trunk_only_repo_w_no_tags( commit_type=commit_type, extra_configs={ "tool.semantic_release.allow_zero_version": True, }, ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_repo_w_no_tags, build_repo_func=_build_repo, dest_dir=example_project_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return { "definition": cached_repo_data["build_definition"], "repo": 
example_project_git_repo(), } @pytest.fixture def repo_w_no_tags_conventional_commits_unmasked_initial_release( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_trunk_only_repo_w_no_tags: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_repo_w_no_tags: str, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: """Replicates repo with no tags, but with mask_initial_release=False""" repo_name = repo_w_no_tags_conventional_commits_unmasked_initial_release.__name__ commit_type: CommitConvention = ( repo_name.split("_commits", maxsplit=1)[0].split("_")[-1] # type: ignore[assignment] ) def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = get_repo_definition_4_trunk_only_repo_w_no_tags( commit_type=commit_type, extra_configs={ "tool.semantic_release.changelog.default_templates.mask_initial_release": False, }, ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_repo_w_no_tags, build_repo_func=_build_repo, dest_dir=example_project_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return { "definition": cached_repo_data["build_definition"], "repo": example_project_git_repo(), } @pytest.fixture def repo_w_no_tags_conventional_commits( build_trunk_only_repo_w_no_tags: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_no_tags_conventional_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_no_tags( repo_name=repo_name, commit_type=commit_type, 
dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_no_tags_emoji_commits( build_trunk_only_repo_w_no_tags: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_no_tags_emoji_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_no_tags( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_no_tags_scipy_commits( build_trunk_only_repo_w_no_tags: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_no_tags_scipy_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_no_tags( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } python-semantic-release-10.4.1/tests/fixtures/repos/trunk_based_dev/repo_w_prereleases.py000066400000000000000000000424601506116242600321070ustar00rootroot00000000000000from __future__ import annotations from datetime import timedelta from itertools import count from pathlib import Path from typing import TYPE_CHECKING, cast import pytest from semantic_release.cli.config import ChangelogOutputFormat from semantic_release.version.version import Version import tests.conftest import tests.const import tests.util from tests.const import ( EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Any, Sequence from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.conventional.parser import ( ConventionalCommitParser, ) from 
semantic_release.commit_parser.emoji import EmojiCommitParser from semantic_release.commit_parser.scipy import ScipyCommitParser from semantic_release.commit_parser.token import ParseResult from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, ConvertCommitSpecsToCommitDefsFn, ExProjectGitRepoFn, GetRepoDefinitionFn, RepoActions, RepoActionWriteChangelogsDestFile, TomlSerializableTypes, ) @pytest.fixture(scope="session") def deps_files_4_repo_w_prereleases( deps_files_4_example_git_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_repo_w_prereleases( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_repo_w_prereleases: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files(deps_files_4_repo_w_prereleases) @pytest.fixture(scope="session") def get_repo_definition_4_trunk_only_repo_w_prerelease_tags( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, changelog_md_file: Path, changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, default_conventional_parser: ConventionalCommitParser, default_emoji_parser: EmojiCommitParser, default_scipy_parser: ScipyCommitParser, default_tag_format_str: str, ) -> GetRepoDefinitionFn: """ Builds a repository with trunk-only committing (no-branching) strategy with official and prereleases releases. 
""" parser_classes: dict[CommitConvention, CommitParser[Any, Any]] = { "conventional": default_conventional_parser, "emoji": default_emoji_parser, "scipy": default_scipy_parser, } def _get_repo_from_definition( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = True, ignore_merge_commits: bool = True, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) commit_parser = cast( "CommitParser[ParseResult, ParserOptions]", parser_classes[commit_type], ) changelog_file_definitions: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, "mask_initial_release": mask_initial_release, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, "mask_initial_release": mask_initial_release, }, ] repo_construction_steps: list[RepoActions] = [] repo_construction_steps.append( { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "tag_format_str": tag_format_str or default_tag_format_str, "mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.main": { "match": r"^(main|master)$", "prerelease": False, }, "tool.semantic_release.commit_parser_options.ignore_merge_commits": ignore_merge_commits, "tool.semantic_release.allow_zero_version": True, **(extra_configs or {}), }, }, } ) # Make initial release new_version = Version.parse( "0.1.0", tag_format=(tag_format_str or default_tag_format_str) ) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": 
convert_commit_specs_to_commit_defs( [ { "cid": (cid_c1_initial := "c1-initial_commit"), "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, { "cid": (cid_c2_feat := "c2-feat"), "conventional": "feat: add new feature", "emoji": ":sparkles: add new feature", "scipy": "ENH: add new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [cid_c1_initial, cid_c2_feat], }, }, ], }, }, ] ) # Make a fix and release it as a release candidate new_version = Version.parse("0.1.1-rc.1", tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_c3_fix := "c2-fix"), "conventional": "fix: correct some text\n\nfixes: #123\n", "emoji": ":bug: correct some text\n\nfixes: #123\n", "scipy": "MAINT: correct some text\n\nfixes: #123\n", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [cid_c3_fix], }, }, ], }, }, ] ) # Make an additional feature change and release it as a new release candidate new_version = Version.parse("0.2.0-rc.1", 
tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_c4_feat := "c4-feat"), "conventional": "feat: add some more text", "emoji": ":sparkles: add some more text", "scipy": "ENH: add some more text", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [cid_c4_feat], }, }, ], }, }, ] ) # Make an additional feature change and officially release the latest new_version = Version.parse("0.2.0", tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_c5_feat := "c5-feat"), "conventional": "feat(cli): add cli command", "emoji": ":sparkles:(cli) add cli command", "scipy": "ENH: cli: add cli command", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [cid_c5_feat], }, }, ], }, }, ] ) return repo_construction_steps return _get_repo_from_definition @pytest.fixture(scope="session") def build_trunk_only_repo_w_prerelease_tags( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_trunk_only_repo_w_prerelease_tags: GetRepoDefinitionFn, 
get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_repo_w_prereleases: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = ( get_repo_definition_4_trunk_only_repo_w_prerelease_tags( commit_type=commit_type, ) ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_repo_w_prereleases, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def repo_w_trunk_only_n_prereleases_conventional_commits( build_trunk_only_repo_w_prerelease_tags: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_trunk_only_n_prereleases_conventional_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_prerelease_tags( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_trunk_only_n_prereleases_emoji_commits( build_trunk_only_repo_w_prerelease_tags: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> 
BuiltRepoResult: repo_name = repo_w_trunk_only_n_prereleases_emoji_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_prerelease_tags( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_trunk_only_n_prereleases_scipy_commits( build_trunk_only_repo_w_prerelease_tags: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_trunk_only_n_prereleases_scipy_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_prerelease_tags( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } python-semantic-release-10.4.1/tests/fixtures/repos/trunk_based_dev/repo_w_tags.py000066400000000000000000000354041506116242600305330ustar00rootroot00000000000000from __future__ import annotations from datetime import timedelta from itertools import count from pathlib import Path from typing import TYPE_CHECKING, cast import pytest from semantic_release.cli.config import ChangelogOutputFormat from semantic_release.version.version import Version import tests.conftest import tests.const import tests.util from tests.const import ( EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Any, Sequence from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.conventional.parser import ( ConventionalCommitParser, ) from semantic_release.commit_parser.emoji import EmojiCommitParser from semantic_release.commit_parser.scipy import ScipyCommitParser from semantic_release.commit_parser.token import ParseResult from tests.conftest import ( 
GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ( ExProjectDir, ) from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, ConvertCommitSpecsToCommitDefsFn, ExProjectGitRepoFn, GetRepoDefinitionFn, RepoActions, RepoActionWriteChangelogsDestFile, TomlSerializableTypes, ) @pytest.fixture(scope="session") def deps_files_4_repo_w_tags( deps_files_4_example_git_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_repo_w_tags( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_repo_w_tags: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files(deps_files_4_repo_w_tags) @pytest.fixture(scope="session") def get_repo_definition_4_trunk_only_repo_w_tags( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, changelog_md_file: Path, changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, default_conventional_parser: ConventionalCommitParser, default_emoji_parser: EmojiCommitParser, default_scipy_parser: ScipyCommitParser, default_tag_format_str: str, ) -> GetRepoDefinitionFn: """ Builds a repository with trunk-only committing (no-branching) strategy with only official releases. 
""" parser_classes: dict[CommitConvention, CommitParser[Any, Any]] = { "conventional": default_conventional_parser, "emoji": default_emoji_parser, "scipy": default_scipy_parser, } def _get_repo_from_definition( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = True, ignore_merge_commits: bool = True, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) commit_parser = cast( "CommitParser[ParseResult, ParserOptions]", parser_classes[commit_type], ) changelog_file_definitions: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, "mask_initial_release": mask_initial_release, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, "mask_initial_release": mask_initial_release, }, ] repo_construction_steps: list[RepoActions] = [] repo_construction_steps.append( { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "tag_format_str": tag_format_str or default_tag_format_str, "mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.main": { "match": r"^(main|master)$", "prerelease": False, }, "tool.semantic_release.commit_parser_options.ignore_merge_commits": ignore_merge_commits, "tool.semantic_release.allow_zero_version": True, **(extra_configs or {}), }, }, } ) # Make initial release new_version = Version.parse( "0.1.0", tag_format=(tag_format_str or default_tag_format_str) ) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": 
convert_commit_specs_to_commit_defs( [ { "cid": (cid_c1_initial := "c1_initial_commit"), "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, { "cid": (cid_c2_feat := "c2-feat"), "conventional": "feat: add new feature", "emoji": ":sparkles: add new feature", "scipy": "ENH: add new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [ cid_c1_initial, cid_c2_feat, ], }, }, ], }, }, ] ) # Make a fix and officially release it new_version = Version.parse("0.1.1", tag_format=new_version.tag_format) repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "cid": (cid_c3_fix := "c3-fix"), "conventional": "fix: correct some text\n\nResolves: #123\n", "emoji": ":bug: correct some text\n\nResolves: #123\n", "scipy": "MAINT: correct some text\n\nResolves: #123\n", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, parser=commit_parser, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": str(new_version), "tag_format": new_version.tag_format, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitions, "commit_ids": [cid_c3_fix], }, }, ], }, }, ] ) return repo_construction_steps return _get_repo_from_definition @pytest.fixture(scope="session") def build_trunk_only_repo_w_tags( 
build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_trunk_only_repo_w_tags: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_repo_w_tags: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = get_repo_definition_4_trunk_only_repo_w_tags( commit_type=commit_type, ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_repo_w_tags, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def repo_w_trunk_only_conventional_commits_using_tag_format( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_trunk_only_repo_w_tags: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_repo_w_tags: str, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_trunk_only_conventional_commits_using_tag_format.__name__ commit_type: CommitConvention = ( repo_name.split("_commits", maxsplit=1)[0].split("_")[-1] # type: ignore[assignment] ) def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = 
get_repo_definition_4_trunk_only_repo_w_tags( commit_type=commit_type, tag_format_str="submod-v{version}", ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_repo_w_tags, build_repo_func=_build_repo, dest_dir=example_project_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return { "definition": cached_repo_data["build_definition"], "repo": example_project_git_repo(), } @pytest.fixture def repo_w_trunk_only_conventional_commits( build_trunk_only_repo_w_tags: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_trunk_only_conventional_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_tags( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_trunk_only_emoji_commits( build_trunk_only_repo_w_tags: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_trunk_only_emoji_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_tags( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_trunk_only_scipy_commits( build_trunk_only_repo_w_tags: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_trunk_only_scipy_commits.__name__ commit_type: CommitConvention = 
repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_tags( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } python-semantic-release-10.4.1/tests/fixtures/scipy.py000066400000000000000000000310061506116242600230540ustar00rootroot00000000000000from __future__ import annotations from itertools import chain, zip_longest from textwrap import dedent from typing import TYPE_CHECKING import pytest from semantic_release.commit_parser.scipy import ScipyCommitParser from semantic_release.enums import LevelBump if TYPE_CHECKING: from typing import Protocol from semantic_release.commit_parser.scipy import ScipyParserOptions class FormatScipyCommitFn(Protocol): def __call__( self, scipy_tag: str, subject: str, body_parts: list[str] ) -> str: ... @pytest.fixture(scope="session") def format_scipy_commit(): def _format_scipy_commit( scipy_tag: str, subject: str, body_parts: list[str] ) -> str: body = str.join("\n\n", body_parts) return f"{scipy_tag}: {subject}\n\n{body}" return _format_scipy_commit @pytest.fixture(scope="session") def default_scipy_parser() -> ScipyCommitParser: return ScipyCommitParser() @pytest.fixture(scope="session") def default_scipy_parser_options( default_scipy_parser: ScipyCommitParser, ) -> ScipyParserOptions: return default_scipy_parser.get_default_options() @pytest.fixture(scope="session") def scipy_chore_commit_types( default_scipy_parser_options: ScipyParserOptions, ) -> list[str]: return [ k for k, v in default_scipy_parser_options.tag_to_level.items() if v < LevelBump.PATCH ] @pytest.fixture(scope="session") def scipy_patch_commit_types( default_scipy_parser_options: ScipyParserOptions, ) -> list[str]: return [ k for k, v in default_scipy_parser_options.tag_to_level.items() if v is LevelBump.PATCH ] @pytest.fixture(scope="session") def scipy_minor_commit_types( default_scipy_parser_options: ScipyParserOptions, ) -> list[str]: return [ k 
for k, v in default_scipy_parser_options.tag_to_level.items() if v is LevelBump.MINOR ] @pytest.fixture(scope="session") def scipy_major_commit_types( default_scipy_parser_options: ScipyParserOptions, ) -> list[str]: return [ k for k, v in default_scipy_parser_options.tag_to_level.items() if v is LevelBump.MAJOR ] @pytest.fixture(scope="session") def scipy_nonparseable_commits() -> list[str]: return [ "Initial Commit", "Merge pull request #14447 from AnirudhDagar/rename_ndimage_modules", ] @pytest.fixture(scope="session") def scipy_chore_subjects(scipy_chore_commit_types: list[str]) -> list[str]: subjects = { "BENCH": "disable very slow benchmark in optimize_milp.py", "DEV": "add unicode check to pre-commit-hook", "DOC": "change approx_fprime doctest (#20568)", "STY": "fixed ruff & mypy issues", "TST": "Skip Cython tests for editable installs", "REL": "set version to 1.0.0", "TEST": "Add Cython tests for editable installs", } # Test fixture modification failure prevention assert len(subjects.keys()) == len(scipy_chore_commit_types) return [subjects[k] for k in scipy_chore_commit_types] @pytest.fixture(scope="session") def scipy_patch_subjects(scipy_patch_commit_types: list[str]) -> list[str]: subjects = { "BLD": "move the optimize build steps earlier into the build sequence", "BUG": "Fix invalid default bracket selection in _bracket_minimum (#20563)", "MAINT": "optimize.linprog: fix bug when integrality is a list of all zeros (#20586)", } # Test fixture modification failure prevention assert len(subjects.keys()) == len(scipy_patch_commit_types) return [subjects[k] for k in scipy_patch_commit_types] @pytest.fixture(scope="session") def scipy_minor_subjects(scipy_minor_commit_types: list[str]) -> list[str]: subjects = { "ENH": "stats.ttest_1samp: add array-API support (#20545)", # "REV": "reverted a previous commit", "FEAT": "added a new feature", } # Test fixture modification failure prevention assert len(subjects.keys()) == len(scipy_minor_commit_types) return 
[subjects[k] for k in scipy_minor_commit_types] @pytest.fixture(scope="session") def scipy_major_subjects(scipy_major_commit_types: list[str]) -> list[str]: subjects = { "API": "dropped support for python 3.7", "DEP": "stats: switch kendalltau to kwarg-only, remove initial_lexsort", } # Test fixture modification failure prevention assert len(subjects.keys()) == len(scipy_major_commit_types) return [subjects[k] for k in scipy_major_commit_types] @pytest.fixture(scope="session") def scipy_brk_change_commit_bodies() -> list[list[str]]: brk_chg_msg = dedent( """ BREAKING CHANGE: a description of what is now different with multiple lines """ ).strip() one_line_desc = "resolves bug related to windows incompatiblity" return [ # No regular change description [brk_chg_msg], # regular change description & breaking change message [one_line_desc, brk_chg_msg], # regular change description & breaking change message with footer [one_line_desc, brk_chg_msg, "Resolves: #666"], ] @pytest.fixture(scope="session") def scipy_nonbrking_commit_bodies() -> list[list[str]]: # a GitHub squash merge that preserved PR commit messages (all chore-like) github_squash_merge_body = str.join( "\n\n", [ "* DOC: import ropy.transform to test for numpy error", "* DOC: lower numpy version", "* DOC: lower numpy version further", "* STY: resolve linting issues", ], ) one_block_desc = dedent( """ Bug spotted on Fedora, see https://src.fedoraproject.org/rpms/scipy/pull-request/22 with an additional multiline description """ ).strip() return [ github_squash_merge_body.split("\n\n"), # split into blocks # empty body [], [""], # formatted body (ie dependabot) dedent( """ Bumps [package](https://github.com/namespace/project) from 3.5.3 to 4.1.1. 
- [Release notes](https://github.com/namespace/project/releases) - [Changelog](https://github.com/namespace/project/blob/4.x/CHANGES) - [Commits](https://github.com/namespace/project/commits/v4.1.1) --- updated-dependencies: - dependency-name: package dependency-type: direct:development update-type: version-update:semver-major """ ) .lstrip() .split("\n\n"), # 1 block description one_block_desc.split("\n\n"), # keywords ["[skip azp] [skip actions]"], # Resolving an issue on GitHub ["Resolves: #127"], [one_block_desc, "Closes: #1024"], ] @pytest.fixture(scope="session") def scipy_chore_commit_parts( scipy_chore_commit_types: list[str], scipy_chore_subjects: list[str], scipy_nonbrking_commit_bodies: list[list[str]], ) -> list[tuple[str, str, list[str]]]: # Test fixture modification failure prevention assert len(scipy_chore_commit_types) == len(scipy_chore_subjects) # build full commit messages with commit type prefix, subject, and body variant # for all body variants return [ (commit_type, subject, commit_body_blocks) for commit_type, subject in zip(scipy_chore_commit_types, scipy_chore_subjects) for commit_body_blocks in scipy_nonbrking_commit_bodies ] @pytest.fixture(scope="session") def scipy_chore_commits( scipy_chore_commit_parts: list[tuple[str, str, list[str]]], format_scipy_commit: FormatScipyCommitFn, ) -> list[str]: # build full commit messages with commit type prefix, subject, and body variant # for all body variants return [ format_scipy_commit(commit_type, subject, commit_body) for commit_type, subject, commit_body in scipy_chore_commit_parts ] @pytest.fixture(scope="session") def scipy_patch_commit_parts( scipy_patch_commit_types: list[str], scipy_patch_subjects: list[str], scipy_nonbrking_commit_bodies: list[list[str]], ) -> list[tuple[str, str, list[str]]]: # Test fixture modification failure prevention assert len(scipy_patch_commit_types) == len(scipy_patch_subjects) # build full commit messages with commit type prefix, subject, and body variant # 
for all body variants return [ (commit_type, subject, commit_body_blocks) for commit_type, subject in zip(scipy_patch_commit_types, scipy_patch_subjects) for commit_body_blocks in scipy_nonbrking_commit_bodies ] @pytest.fixture(scope="session") def scipy_patch_commits( scipy_patch_commit_parts: list[tuple[str, str, list[str]]], format_scipy_commit: FormatScipyCommitFn, ) -> list[str]: # build full commit messages with commit type prefix, subject, and body variant # for all body variants return [ format_scipy_commit(commit_type, subject, commit_body) for commit_type, subject, commit_body in scipy_patch_commit_parts ] @pytest.fixture(scope="session") def scipy_minor_commit_parts( scipy_minor_commit_types: list[str], scipy_minor_subjects: list[str], scipy_nonbrking_commit_bodies: list[list[str]], ) -> list[tuple[str, str, list[str]]]: # Test fixture modification failure prevention assert len(scipy_minor_commit_types) == len(scipy_minor_subjects) # build full commit messages with commit type prefix, subject, and body variant # for all body variants return [ (commit_type, subject, commit_body_blocks) for commit_type, subject in zip(scipy_minor_commit_types, scipy_minor_subjects) for commit_body_blocks in scipy_nonbrking_commit_bodies ] @pytest.fixture(scope="session") def scipy_minor_commits( scipy_minor_commit_parts: list[tuple[str, str, list[str]]], format_scipy_commit: FormatScipyCommitFn, ) -> list[str]: # build full commit messages with commit type prefix, subject, and body variant # for all body variants return [ format_scipy_commit(commit_type, subject, commit_body) for commit_type, subject, commit_body in scipy_minor_commit_parts ] @pytest.fixture(scope="session") def scipy_major_commit_parts( scipy_major_commit_types: list[str], scipy_major_subjects: list[str], scipy_brk_change_commit_bodies: list[list[str]], ) -> list[tuple[str, str, list[str]]]: # Test fixture modification failure prevention assert len(scipy_major_commit_types) == len(scipy_major_subjects) # 
build full commit messages with commit type prefix, subject, and body variant # for all body variants return [ (commit_type, subject, commit_body_blocks) for commit_type, subject in zip(scipy_major_commit_types, scipy_major_subjects) for commit_body_blocks in scipy_brk_change_commit_bodies ] @pytest.fixture(scope="session") def scipy_major_commits( scipy_major_commit_parts: list[tuple[str, str, list[str]]], format_scipy_commit: FormatScipyCommitFn, ) -> list[str]: # build full commit messages with commit type prefix, subject, and body variant # for all body variants return [ format_scipy_commit(commit_type, subject, commit_body) for commit_type, subject, commit_body in scipy_major_commit_parts ] @pytest.fixture(scope="session") def scipy_patch_mixed_commits( scipy_patch_commits: list[str], scipy_chore_commits: list[str], ) -> list[str]: return list( filter( None, chain.from_iterable(zip_longest(scipy_patch_commits, scipy_chore_commits)), ) ) @pytest.fixture(scope="session") def scipy_minor_mixed_commits( scipy_minor_commits: list[str], scipy_patch_commits: list[str], scipy_chore_commits: list[str], ) -> list[str]: return list( chain.from_iterable( zip_longest( scipy_minor_commits, scipy_patch_commits, scipy_chore_commits, fillvalue="uninteresting", ) ) ) @pytest.fixture(scope="session") def scipy_major_mixed_commits( scipy_major_commits: list[str], scipy_minor_commits: list[str], scipy_patch_commits: list[str], scipy_chore_commits: list[str], ) -> list[str]: return list( filter( None, chain.from_iterable( zip_longest( scipy_major_commits, scipy_minor_commits, scipy_patch_commits, scipy_chore_commits, ) ), ) ) 
python-semantic-release-10.4.1/tests/gh_action/000077500000000000000000000000001506116242600214355ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/gh_action/example_project/000077500000000000000000000000001506116242600246165ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/gh_action/example_project/pyproject.toml000066400000000000000000000001151506116242600275270ustar00rootroot00000000000000[project] name = "example" version = "0.0.0" description = "Example project" python-semantic-release-10.4.1/tests/gh_action/example_project/releaserc.toml000066400000000000000000000000531506116242600274560ustar00rootroot00000000000000[semantic-release] commit_parser = "emoji" python-semantic-release-10.4.1/tests/gh_action/run.sh000066400000000000000000000073251506116242600226040ustar00rootroot00000000000000#!/bin/bash set -eu if ! command -v realpath &>/dev/null; then realpath() { readlink -f "$1" } fi TEST_DIR="$(realpath "$(dirname "${BASH_SOURCE[0]}")")" PROJ_DIR="$(realpath "$(dirname "$TEST_DIR")/..")" EXAMPLE_PROJECT_BASE_DIR="${EXAMPLE_PROJECT_BASE_DIR:-"$TEST_DIR/example_project"}" if [ -z "${UTILS_LOADED:-}" ]; then # shellcheck source=tests/utils.sh source "$TEST_DIR/utils.sh" fi create_example_project() { local EXAMPLE_PROJECT_DIR="$1" log "Creating example project in: $EXAMPLE_PROJECT_DIR" mkdir -vp "$(dirname "$EXAMPLE_PROJECT_DIR")" cp -r "${EXAMPLE_PROJECT_BASE_DIR}" "$EXAMPLE_PROJECT_DIR" log "Constructing git history in repository" pushd "$EXAMPLE_PROJECT_DIR" >/dev/null || return 1 # Initialize and configure git (remove any signature requirements) git init git config --local user.email "developer@users.noreply.github.com" git config --local user.name "developer" git config --local commit.gpgSign false git config --local tag.gpgSign false git remote add origin "https://github.com/python-semantic-release/example-project.git" # Create initial commit and tag git add . 
git commit -m "Initial commit" # set default branch to main git branch -m main # Create the first release (with commit & tag) cat <pyproject.toml [project] name = "example" version = "1.0.0" description = "Example project" EOF git commit -am '1.0.0' git tag -a v1.0.0 -m "v1.0.0" popd >/dev/null || return 1 log "Example project created successfully" } # ------------------------------ # TEST SUITE DRIVER # ------------------------------ run_test_suite() { local ALL_TEST_FNS # Dynamically import all test scripts for test_script in "$TEST_DIR"/suite/test_*.sh; do if [ -f "$test_script" ]; then if ! source "$test_script"; then error "Failed to load test script: $test_script" fi fi done # Extract all test functions tests_in_env="$(compgen -A function | grep "^test_")" read -r -a ALL_TEST_FNS <<< "$(printf '%s' "$tests_in_env" | tr '\n' ' ')" log "" log "************************" log "* Running test suite *" log "************************" # Incrementally run all test functions and flag if any fail local test_index=1 local test_failures=0 for test_fn in "${ALL_TEST_FNS[@]}"; do if command -v "$test_fn" &>/dev/null; then if ! "$test_fn" "$test_index"; then ((test_failures++)) fi fi log "--------------------------------------------------------------------------------" ((test_index++)) done log "" log "************************" log "* Test Summary *" log "************************" log "" log "Total tests executed: ${#ALL_TEST_FNS[@]}" log "Successes: $((${#ALL_TEST_FNS[@]} - test_failures))" log "Failures: $test_failures" if [ "$test_failures" -gt 0 ]; then return 1 fi } # ------------------------------ # MAIN # ------------------------------ log "================================================================================" log "|| PSR Version Action Test Runner ||" log "================================================================================" log "Initializing..." 
# Make absolute path to project directory PROJECT_MOUNT_DIR="${PROJ_DIR:?}/${PROJECT_MOUNT_DIR:?}" log "" log "******************************" log "* Running test suite setup *" log "******************************" log "" # Setup project environment create_example_project "$PROJECT_MOUNT_DIR" trap 'rm -rf "${PROJECT_MOUNT_DIR:?}"' EXIT run_test_suite python-semantic-release-10.4.1/tests/gh_action/suite/000077500000000000000000000000001506116242600225665ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/gh_action/suite/test_version.sh000066400000000000000000000062521506116242600256530ustar00rootroot00000000000000#!/bin/bash __file__="$(realpath "${BASH_SOURCE[0]}")" __directory__="$(dirname "${__file__}")" if ! [ "${UTILS_LOADED}" = "true" ]; then # shellcheck source=tests/utils.sh source "$__directory__/../utils.sh" fi test_version() { # Using default configuration within PSR with no modifications # triggering the NOOP mode to prevent errors since the repo doesn't exist # We are just trying to test that the root options & tag arguments are # passed to the action without a fatal error local index="${1:?Index not provided}" local test_name="${FUNCNAME[0]}" # Create expectations & set env variables that will be passed in for Docker command local WITH_VAR_GITHUB_TOKEN="ghp_1x2x3x4x5x6x7x8x9x0x1x2x3x4x5x6x7x8x9x0" local WITH_VAR_NO_OPERATION_MODE="true" local WITH_VAR_VERBOSITY="2" local expected_psr_cmd=".*/bin/semantic-release -vv --noop version" # Execute the test & capture output # Fatal errors if exit code is not 0 local output="" if ! output="$(run_test "$index. $test_name" 2>&1)"; then # Log the output for debugging purposes log "$output" error "fatal error occurred!" error "::error:: $test_name failed!" return 1 fi # Evaluate the output to ensure the expected command is present if ! 
printf '%s' "$output" | grep -q -E "$expected_psr_cmd"; then # Log the output for debugging purposes log "$output" error "Failed to find the expected command in the output!" error "\tExpected Command: $expected_psr_cmd" error "::error:: $test_name failed!" return 1 fi log "\n$index. $test_name: PASSED!" } test_version_w_custom_config() { # Using default configuration within PSR with no modifications # triggering the NOOP mode to prevent errors since the repo doesn't exist # We are just trying to test that the root options & tag arguments are # passed to the action without a fatal error local index="${1:?Index not provided}" local test_name="${FUNCNAME[0]}" # Create expectations & set env variables that will be passed in for Docker command local WITH_VAR_GITHUB_TOKEN="ghp_1x2x3x4x5x6x7x8x9x0x1x2x3x4x5x6x7x8x9x0" local WITH_VAR_NO_OPERATION_MODE="true" local WITH_VAR_VERBOSITY="0" local WITH_VAR_CONFIG_FILE="releaserc.toml" local expected_psr_cmd=".*/bin/semantic-release --config $WITH_VAR_CONFIG_FILE --noop version" # Execute the test & capture output # Fatal errors if exit code is not 0 local output="" if ! output="$(run_test "$index. $test_name" 2>&1)"; then # Log the output for debugging purposes log "$output" error "fatal error occurred!" error "::error:: $test_name failed!" return 1 fi # Evaluate the output to ensure the expected command is present if ! printf '%s' "$output" | grep -q "$expected_psr_cmd"; then # Log the output for debugging purposes log "$output" error "Failed to find the expected command in the output!" error "\tExpected Command: $expected_psr_cmd" error "::error:: $test_name failed!" return 1 fi log "\n$index. $test_name: PASSED!" } python-semantic-release-10.4.1/tests/gh_action/suite/test_version_strict.sh000066400000000000000000000034441506116242600272430ustar00rootroot00000000000000#!/bin/bash __file__="$(realpath "${BASH_SOURCE[0]}")" __directory__="$(dirname "${__file__}")" if ! 
[ "${UTILS_LOADED:-false}" = "true" ]; then # shellcheck source=tests/utils.sh source "$__directory__/../utils.sh" fi test_version_strict() { # Using default configuration within PSR with no modifications # triggering the NOOP mode to prevent errors since the repo doesn't exist # We are just trying to test that the root options & tag arguments are # passed to the action without a fatal error local index="${1:?Index not provided}" local test_name="${FUNCNAME[0]}" # Create expectations & set env variables that will be passed in for Docker command local WITH_VAR_GITHUB_TOKEN="ghp_1x2x3x4x5x6x7x8x9x0x1x2x3x4x5x6x7x8x9x0" local WITH_VAR_NO_OPERATION_MODE="true" local WITH_VAR_STRICT="true" local expected_psr_cmd=".*/bin/semantic-release -v --strict --noop version" # Since the example project is at the latest release, we expect strict mode # to fail with a non-zero exit code # Execute the test & capture output local output="" if output="$(run_test "$index. $test_name" 2>&1)"; then # Log the output for debugging purposes log "$output" error "Strict mode should of exited with a non-zero exit code but didn't!" error "::error:: $test_name failed!" return 1 fi # Evaluate the output to ensure the expected command is present if ! printf '%s' "$output" | grep -q "$expected_psr_cmd"; then # Log the output for debugging purposes log "$output" error "Failed to find the expected command in the output!" error "\tExpected Command: $expected_psr_cmd" error "::error:: $test_name failed!" return 1 fi log "\n$index. $test_name: PASSED!" 
}python-semantic-release-10.4.1/tests/gh_action/utils.sh000066400000000000000000000041401506116242600231300ustar00rootroot00000000000000#!/bin/bash # ------------------------------ # UTILS # ------------------------------ IMAGE_TAG="${TEST_CONTAINER_TAG:?TEST_CONTAINER_TAG not set}" PROJECT_MOUNT_DIR="${PROJECT_MOUNT_DIR:-"tmp/project"}" GITHUB_ACTIONS_CWD="/github/workspace" log() { printf '%b\n' "$*" } error() { log >&2 "\033[31m$*\033[0m" } explicit_run_cmd() { local cmd="$*" log "$> $cmd\n" eval "$cmd" } run_test() { local test_name="${1:?Test name not provided}" test_name="${test_name//_/ }" test_name="$(tr "[:lower:]" "[:upper:]" <<< "${test_name:0:1}")${test_name:1}" # Set Defaults based on action.yml [ -z "${WITH_VAR_DIRECTORY:-}" ] && local WITH_VAR_DIRECTORY="." [ -z "${WITH_VAR_CONFIG_FILE:-}" ] && local WITH_VAR_CONFIG_FILE="" [ -z "${WITH_VAR_NO_OPERATION_MODE:-}" ] && local WITH_VAR_NO_OPERATION_MODE="false" [ -z "${WITH_VAR_VERBOSITY:-}" ] && local WITH_VAR_VERBOSITY="1" # Extract all WITH_VAR_ variables dynamically from environment local ENV_ARGS=() args_in_env="$(compgen -A variable | grep "^WITH_VAR_")" read -r -a ENV_ARGS <<< "$(printf '%s' "$args_in_env" | tr '\n' ' ')" # Set Docker arguments (default: always remove the container after execution) local DOCKER_ARGS=("--rm") # Add all WITH_VAR_ variables to the Docker command local actions_input_var_name="" for input in "${ENV_ARGS[@]}"; do # Convert WITH_VAR_ to INPUT_ to simulate GitHub Actions input syntax actions_input_var_name="INPUT_${input#WITH_VAR_}" # Add the environment variable to the Docker command DOCKER_ARGS+=("-e ${actions_input_var_name}='${!input}'") done # Add the project directory to the Docker command DOCKER_ARGS+=("-v ${PROJECT_MOUNT_DIR}:${GITHUB_ACTIONS_CWD}") # Set the working directory to the project directory DOCKER_ARGS+=("-w ${GITHUB_ACTIONS_CWD}") # Run the test log "\n$test_name" log "--------------------------------------------------------------------------------" if 
! explicit_run_cmd "docker run ${DOCKER_ARGS[*]} $IMAGE_TAG"; then return 1 fi } export UTILS_LOADED="true" python-semantic-release-10.4.1/tests/unit/000077500000000000000000000000001506116242600204615ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/unit/__init__.py000066400000000000000000000000001506116242600225600ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/unit/conftest.py000066400000000000000000000010521506116242600226560ustar00rootroot00000000000000from __future__ import annotations from pathlib import Path import pytest @pytest.hookimpl(tryfirst=True) def pytest_collection_modifyitems(items: list[pytest.Item]) -> None: """Apply the unit marker to all tests in the unit test directory.""" unit_test_directory = Path(__file__).parent for item in items: if unit_test_directory in item.path.parents: item.add_marker(pytest.mark.unit) if "order" not in [mark.name for mark in item.own_markers]: item.add_marker(pytest.mark.order("first")) python-semantic-release-10.4.1/tests/unit/semantic_release/000077500000000000000000000000001506116242600237645ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/unit/semantic_release/__init__.py000066400000000000000000000000001506116242600260630ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/unit/semantic_release/changelog/000077500000000000000000000000001506116242600257135ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/unit/semantic_release/changelog/__init__.py000066400000000000000000000000001506116242600300120ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/unit/semantic_release/changelog/conftest.py000066400000000000000000000322211506116242600301120ustar00rootroot00000000000000from __future__ import annotations from datetime import timedelta from textwrap import dedent from typing import TYPE_CHECKING import pytest from git import Commit, Object, Repo from semantic_release.changelog.release_history import Release, 
ReleaseHistory from semantic_release.commit_parser.token import ParsedCommit from semantic_release.enums import LevelBump from semantic_release.version.version import Version if TYPE_CHECKING: from git import Actor from tests.conftest import GetStableDateNowFn @pytest.fixture def artificial_release_history( commit_author: Actor, stable_now_date: GetStableDateNowFn, ) -> ReleaseHistory: current_datetime = stable_now_date() first_version = Version.parse("1.0.0") second_version = first_version.bump(LevelBump.MINOR) fix_commit_subject = "fix a problem" fix_commit_type = "fix" fix_commit_scope = "cli" fix_commit = Commit( Repo("."), Object.NULL_HEX_SHA[:20].encode("utf-8"), message=f"{fix_commit_type}({fix_commit_scope}): {fix_commit_subject}", ) fix_commit_parsed = ParsedCommit( bump=LevelBump.PATCH, type="fix", scope=fix_commit_scope, descriptions=[fix_commit_subject], breaking_descriptions=[], commit=fix_commit, ) fix_commit_2_subject = "alphabetically first to solve a non-scoped problem" fix_commit_2_type = "fix" fix_commit_2_scope = "" fix_commit_2 = Commit( Repo("."), Object.NULL_HEX_SHA[:20].encode("utf-8"), message=f"{fix_commit_2_type}: {fix_commit_2_subject}", ) fix_commit_2_parsed = ParsedCommit( bump=LevelBump.PATCH, type="fix", scope=fix_commit_2_scope, descriptions=[fix_commit_2_subject], breaking_descriptions=[], commit=fix_commit_2, ) fix_commit_3_subject = "alphabetically first to solve a scoped problem" fix_commit_3_type = "fix" fix_commit_3_scope = "cli" fix_commit_3 = Commit( Repo("."), Object.NULL_HEX_SHA[:20].encode("utf-8"), message=f"{fix_commit_3_type}({fix_commit_3_scope}): {fix_commit_3_subject}", ) fix_commit_3_parsed = ParsedCommit( bump=LevelBump.PATCH, type="fix", scope=fix_commit_3_scope, descriptions=[fix_commit_3_subject], breaking_descriptions=[], commit=fix_commit_3, ) feat_commit_subject = "add a new feature" feat_commit_type = "feat" feat_commit_scope = "cli" feat_commit = Commit( Repo("."), Object.NULL_HEX_SHA[:20].encode("utf-8"), 
message=f"{feat_commit_type}({feat_commit_scope}): {feat_commit_subject}", ) feat_commit_parsed = ParsedCommit( bump=LevelBump.MINOR, type="feature", scope=feat_commit_scope, descriptions=[feat_commit_subject], breaking_descriptions=[], commit=feat_commit, ) return ReleaseHistory( unreleased={"feature": [feat_commit_parsed]}, released={ second_version: Release( tagger=commit_author, committer=commit_author, tagged_date=current_datetime, elements={ # Purposefully inserted out of order, should be dictsorted in templates "fix": [ # Purposefully inserted out of alphabetical order, should be sorted in templates fix_commit_parsed, fix_commit_2_parsed, # has no scope fix_commit_3_parsed, # has same scope as 1 ], "feature": [feat_commit_parsed], }, version=second_version, ), first_version: Release( tagger=commit_author, committer=commit_author, tagged_date=current_datetime - timedelta(minutes=1), elements={"feature": [feat_commit_parsed]}, version=first_version, ), }, ) @pytest.fixture def release_history_w_brk_change( artificial_release_history: ReleaseHistory, stable_now_date: GetStableDateNowFn, ) -> ReleaseHistory: current_datetime = stable_now_date() latest_version = next(iter(artificial_release_history.released.keys())) next_version = latest_version.bump(LevelBump.MAJOR) brk_commit_subject = "fix a problem" brk_commit_type = "fix" brk_commit_scope = "cli" brk_change_msg = "this is a breaking change" brk_commit = Commit( Repo("."), Object.NULL_BIN_SHA, message=str.join( "\n\n", [ f"{brk_commit_type}({brk_commit_scope}): {brk_commit_subject}", f"BREAKING CHANGE: {brk_change_msg}", ], ), ) brk_commit_parsed = ParsedCommit( bump=LevelBump.MAJOR, type=brk_commit_type, scope=brk_commit_scope, descriptions=[brk_commit_subject], breaking_descriptions=[brk_change_msg], commit=brk_commit, ) return ReleaseHistory( unreleased={}, released={ next_version: Release( tagger=artificial_release_history.released[latest_version]["tagger"], 
committer=artificial_release_history.released[latest_version][ "committer" ], tagged_date=current_datetime, elements={"Bug Fixes": [brk_commit_parsed]}, version=next_version, ), **artificial_release_history.released, }, ) @pytest.fixture def release_history_w_multiple_brk_changes( release_history_w_brk_change: ReleaseHistory, stable_now_date: GetStableDateNowFn, ) -> ReleaseHistory: current_datetime = stable_now_date() latest_version = next(iter(release_history_w_brk_change.released.keys())) brk_commit_subject = "adding a revolutionary feature" brk_commit_type = "feat" brk_change_msg = "The feature changes everything in a breaking way" brk_commit = Commit( Repo("."), Object.NULL_BIN_SHA, message=str.join( "\n\n", [ f"{brk_commit_type}: {brk_commit_subject}", f"BREAKING CHANGE: {brk_change_msg}", ], ), ) brk_commit_parsed = ParsedCommit( bump=LevelBump.MAJOR, type=brk_commit_type, scope="", # No scope in this commit descriptions=[brk_commit_subject], breaking_descriptions=[brk_change_msg], commit=brk_commit, ) return ReleaseHistory( unreleased={}, released={ **release_history_w_brk_change.released, # Replaces and inserts a new commit of different type with breaking changes latest_version: Release( tagger=release_history_w_brk_change.released[latest_version]["tagger"], committer=release_history_w_brk_change.released[latest_version][ "committer" ], tagged_date=current_datetime, elements={ **release_history_w_brk_change.released[latest_version]["elements"], "Features": [brk_commit_parsed], }, version=latest_version, ), }, ) @pytest.fixture def single_release_history( artificial_release_history: ReleaseHistory, ) -> ReleaseHistory: version = list(artificial_release_history.released.keys())[-1] return ReleaseHistory( unreleased={}, released={ version: artificial_release_history.released[version], }, ) @pytest.fixture def release_history_w_a_notice( artificial_release_history: ReleaseHistory, stable_now_date: GetStableDateNowFn, ) -> ReleaseHistory: current_datetime = 
stable_now_date() latest_version = next(iter(artificial_release_history.released.keys())) next_version = latest_version.bump(LevelBump.PATCH) notice_commit_subject = "deprecate a type" notice_commit_type = "refactor" notice_commit_scope = "cli" release_notice = dedent( """\ This is a multline release notice that is made up of two lines. """ ) notice_commit = Commit( Repo("."), Object.NULL_BIN_SHA, message=str.join( "\n\n", [ f"{notice_commit_type}({notice_commit_scope}): {notice_commit_subject}", f"NOTICE: {release_notice}", ], ), ) notice_commit_parsed = ParsedCommit( bump=LevelBump.NO_RELEASE, type=notice_commit_type, scope=notice_commit_scope, descriptions=[notice_commit_subject], breaking_descriptions=[], release_notices=(release_notice.replace("\n", " ").strip(),), commit=notice_commit, ) return ReleaseHistory( unreleased={}, released={ next_version: Release( tagger=artificial_release_history.released[latest_version]["tagger"], committer=artificial_release_history.released[latest_version][ "committer" ], tagged_date=current_datetime, elements={"Refactoring": [notice_commit_parsed]}, version=next_version, ), **artificial_release_history.released, }, ) @pytest.fixture def release_history_w_notice_n_brk_change( artificial_release_history: ReleaseHistory, release_history_w_a_notice: ReleaseHistory, stable_now_date: GetStableDateNowFn, ) -> ReleaseHistory: current_datetime = stable_now_date() latest_version = next(iter(artificial_release_history.released.keys())) next_version = latest_version.bump(LevelBump.MAJOR) brk_commit_subject = "fix a problem" brk_commit_type = "fix" brk_commit_scope = "cli" brk_change_msg = "this is a breaking change" brk_commit = Commit( Repo("."), Object.NULL_BIN_SHA, message=str.join( "\n\n", [ f"{brk_commit_type}({brk_commit_scope}): {brk_commit_subject}", f"BREAKING CHANGE: {brk_change_msg}", ], ), ) brk_commit_parsed = ParsedCommit( bump=LevelBump.MAJOR, type=brk_commit_type, scope=brk_commit_scope, descriptions=[brk_commit_subject], 
breaking_descriptions=[brk_change_msg], commit=brk_commit, ) last_notice_release = next(iter(release_history_w_a_notice.released.keys())) return ReleaseHistory( unreleased={}, released={ next_version: Release( tagger=artificial_release_history.released[latest_version]["tagger"], committer=artificial_release_history.released[latest_version][ "committer" ], tagged_date=current_datetime, elements={ "Bug Fixes": [brk_commit_parsed], **release_history_w_a_notice.released[last_notice_release][ "elements" ], }, version=next_version, ), **artificial_release_history.released, }, ) @pytest.fixture def release_history_w_multiple_notices( release_history_w_a_notice: ReleaseHistory, stable_now_date: GetStableDateNowFn, ) -> ReleaseHistory: current_datetime = stable_now_date() latest_version = next(iter(release_history_w_a_notice.released.keys())) notice_commit_subject = "add a configurable feature" notice_commit_type = "feat" notice_commit_scope = "cli-config" release_notice = dedent( """\ This is a multline release notice that is its own paragraph to detail the configurable feature. 
""" ) notice_commit = Commit( Repo("."), Object.NULL_BIN_SHA, message=str.join( "\n\n", [ f"{notice_commit_type}({notice_commit_scope}): {notice_commit_subject}", f"NOTICE: {release_notice}", ], ), ) notice_commit_parsed = ParsedCommit( bump=LevelBump.MINOR, type=notice_commit_type, scope=notice_commit_scope, descriptions=[notice_commit_subject], breaking_descriptions=[], release_notices=(release_notice.replace("\n", " ").strip(),), commit=notice_commit, ) return ReleaseHistory( unreleased={}, released={ **release_history_w_a_notice.released, # Replaces and inserts a new commit of different type with breaking changes latest_version: Release( tagger=release_history_w_a_notice.released[latest_version]["tagger"], committer=release_history_w_a_notice.released[latest_version][ "committer" ], tagged_date=current_datetime, elements={ "Features": [notice_commit_parsed], **release_history_w_a_notice.released[latest_version]["elements"], }, version=latest_version, ), }, ) python-semantic-release-10.4.1/tests/unit/semantic_release/changelog/test_changelog_context.py000066400000000000000000000545331506116242600330310ustar00rootroot00000000000000from __future__ import annotations import os from datetime import datetime from textwrap import dedent from typing import TYPE_CHECKING from unittest import mock import pytest from git import Commit, Object, Repo from semantic_release.changelog.context import ChangelogMode, make_changelog_context from semantic_release.changelog.release_history import Release, ReleaseHistory from semantic_release.changelog.template import environment from semantic_release.commit_parser import ParsedCommit from semantic_release.enums import LevelBump from semantic_release.hvcs import Bitbucket, Gitea, Github, Gitlab from semantic_release.version.translator import Version from tests.const import EXAMPLE_CHANGELOG_MD_CONTENT if TYPE_CHECKING: from pathlib import Path from git import Actor from tests.fixtures.git_repo import BuildRepoFn @pytest.fixture def 
changelog_tpl_github_context() -> str: """Returns an changelog template which uses all the GitHub configured filters""" return dedent( r""" # Changelog > Repository: {{ "[%s](%s)" | format(context.hvcs_type | capitalize, "/" | create_repo_url) }} ## v2.0.0 {{ "[Change Summary](%s)" | format("v1.0.0" | compare_url("v2.0.0")) }} ### Bug Fixes - Fixed a minor bug {{ "([#%s](%s))" | format(22, 22 | pull_request_url) }} - **cli:** fix a problem {{ "([%s](%s))" | format("000000", "000000" | commit_hash_url) }} ### Resolved Issues - {{ "[#%s](%s)" | format(19, 19 | issue_url) }} """ ).lstrip() @pytest.fixture def changelog_tpl_gitea_context() -> str: """Returns an changelog template which uses all the Gitea configured filters""" return dedent( r""" # Changelog > Repository: {{ "[%s](%s)" | format(context.hvcs_type | capitalize, "/" | create_repo_url) }} ## v2.0.0 ### Bug Fixes - Fixed a minor bug {{ "([#%s](%s))" | format(22, 22 | pull_request_url) }} - **cli:** fix a problem {{ "([%s](%s))" | format("000000", "000000" | commit_hash_url) }} ### Resolved Issues - {{ "[#%s](%s)" | format(19, 19 | issue_url) }} """ ).lstrip() @pytest.fixture def changelog_tpl_gitlab_context() -> str: """Returns an changelog template which uses all the GitLab configured filters""" return dedent( r""" # Changelog > Repository: {{ "[%s](%s)" | format(context.hvcs_type | capitalize, "/" | create_repo_url) }} ## v2.0.0 {{ "[Change Summary](%s)" | format("v1.0.0" | compare_url("v2.0.0")) }} ### Bug Fixes - Fixed a minor bug {{ "([#%s](%s))" | format(22, 22 | merge_request_url) }} - Fixed a performance bug {{ "([#%s](%s))" | format(25, 25 | pull_request_url) }} - **cli:** fix a problem {{ "([%s](%s))" | format("000000", "000000" | commit_hash_url) }} ### Resolved Issues - {{ "[#%s](%s)" | format(19, 19 | issue_url) }} """ ).lstrip() @pytest.fixture def changelog_tpl_bitbucket_context() -> str: """Returns an changelog template which uses all the BitBucket configured filters""" return dedent( r""" # 
Changelog > Repository: {{ "[%s](%s)" | format(context.hvcs_type | capitalize, "/" | create_repo_url) }} ## v2.0.0 {{ "[Change Summary](%s)" | format("v1.0.0" | compare_url("v2.0.0")) }} ### Bug Fixes - Fixed a minor bug {{ "([#%s](%s))" | format(22, 22 | pull_request_url) }} - **cli:** fix a problem {{ "([%s](%s))" | format("000000", "000000" | commit_hash_url) }} """ ).lstrip() @pytest.fixture def artificial_release_history(commit_author: Actor): version = Version.parse("1.0.0") commit_subject = "fix(cli): fix a problem" fix_commit = Commit( Repo("."), Object.NULL_HEX_SHA[:20].encode("utf-8"), message=commit_subject, ) fix_commit_parsed = ParsedCommit( bump=LevelBump.PATCH, type="fix", scope="cli", descriptions=[commit_subject], breaking_descriptions=[], commit=fix_commit, ) commit_subject = "feat(cli): add a new feature" feat_commit = Commit( Repo("."), Object.NULL_HEX_SHA[:20].encode("utf-8"), message=commit_subject, ) feat_commit_parsed = ParsedCommit( bump=LevelBump.MINOR, type="feat", scope="cli", descriptions=[commit_subject], breaking_descriptions=[], commit=feat_commit, ) return ReleaseHistory( unreleased={ "feature": [feat_commit_parsed], }, released={ version: Release( tagger=commit_author, committer=commit_author, tagged_date=datetime.now(), elements={ "feature": [feat_commit_parsed], "fix": [fix_commit_parsed], }, version=version, ) }, ) def test_changelog_context_bitbucket( changelog_tpl_bitbucket_context: str, example_git_https_url: str, artificial_release_history: ReleaseHistory, changelog_md_file: Path, ): hvcs = Bitbucket(example_git_https_url) expected_changelog = str.join( "\n", [ "# Changelog", "", f'> Repository: [{hvcs.__class__.__name__.capitalize()}]({hvcs.create_repo_url("")})', "", "## v2.0.0", "", f'[Change Summary]({hvcs.compare_url("v1.0.0", "v2.0.0")})', "", "### Bug Fixes", "", f"- Fixed a minor bug ([#22]({hvcs.pull_request_url(22)}))", f'- **cli:** fix a problem ([000000]({hvcs.commit_hash_url("000000")}))', "", ], ) env = 
environment(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True) context = make_changelog_context( hvcs_client=hvcs, release_history=artificial_release_history, mode=ChangelogMode.INIT, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ) context.bind_to_environment(env) # Create changelog from template with environment actual_changelog = env.from_string(changelog_tpl_bitbucket_context).render() # Evaluate assert expected_changelog == actual_changelog def test_changelog_context_github( changelog_tpl_github_context: str, example_git_https_url: str, artificial_release_history: ReleaseHistory, changelog_md_file: Path, ): hvcs = Github(example_git_https_url) expected_changelog = str.join( "\n", [ "# Changelog", "", f'> Repository: [{hvcs.__class__.__name__.capitalize()}]({hvcs.create_repo_url("")})', "", "## v2.0.0", "", f'[Change Summary]({hvcs.compare_url("v1.0.0", "v2.0.0")})', "", "### Bug Fixes", "", f"- Fixed a minor bug ([#22]({hvcs.pull_request_url(22)}))", f'- **cli:** fix a problem ([000000]({hvcs.commit_hash_url("000000")}))', "", "### Resolved Issues", "", f"- [#19]({hvcs.issue_url(19)})", "", ], ) env = environment(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True) context = make_changelog_context( hvcs_client=hvcs, release_history=artificial_release_history, mode=ChangelogMode.INIT, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ) context.bind_to_environment(env) # Create changelog from template with environment actual_changelog = env.from_string(changelog_tpl_github_context).render() # Evaluate assert expected_changelog == actual_changelog def test_changelog_context_gitea( changelog_tpl_gitea_context: str, example_git_https_url: str, artificial_release_history: ReleaseHistory, changelog_md_file: Path, ): hvcs = Gitea(example_git_https_url) expected_changelog = str.join( "\n", [ "# Changelog", "", f'> Repository: 
[{hvcs.__class__.__name__.capitalize()}]({hvcs.create_repo_url("")})', "", "## v2.0.0", "", "### Bug Fixes", "", f"- Fixed a minor bug ([#22]({hvcs.pull_request_url(22)}))", f'- **cli:** fix a problem ([000000]({hvcs.commit_hash_url("000000")}))', "", "### Resolved Issues", "", f"- [#19]({hvcs.issue_url(19)})", "", ], ) env = environment(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True) context = make_changelog_context( hvcs_client=hvcs, release_history=artificial_release_history, mode=ChangelogMode.INIT, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ) context.bind_to_environment(env) # Create changelog from template with environment actual_changelog = env.from_string(changelog_tpl_gitea_context).render() # Evaluate assert expected_changelog == actual_changelog def test_changelog_context_gitlab( changelog_tpl_gitlab_context: str, example_git_https_url: str, artificial_release_history: ReleaseHistory, changelog_md_file: Path, ): hvcs = Gitlab(example_git_https_url) expected_changelog = str.join( "\n", [ "# Changelog", "", f'> Repository: [{hvcs.__class__.__name__.capitalize()}]({hvcs.create_repo_url("")})', "", "## v2.0.0", "", f'[Change Summary]({hvcs.compare_url("v1.0.0", "v2.0.0")})', "", "### Bug Fixes", "", f"- Fixed a minor bug ([#22]({hvcs.merge_request_url(22)}))", f"- Fixed a performance bug ([#25]({hvcs.pull_request_url(25)}))", f'- **cli:** fix a problem ([000000]({hvcs.commit_hash_url("000000")}))', "", "### Resolved Issues", "", f"- [#19]({hvcs.issue_url(19)})", "", ], ) env = environment(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True) context = make_changelog_context( hvcs_client=hvcs, release_history=artificial_release_history, mode=ChangelogMode.INIT, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ) context.bind_to_environment(env) # Create changelog from template with environment actual_changelog = 
env.from_string(changelog_tpl_gitlab_context).render() # Evaluate assert expected_changelog == actual_changelog def test_changelog_context_read_file( example_git_https_url: str, build_configured_base_repo: BuildRepoFn, artificial_release_history: ReleaseHistory, changelog_md_file: Path, change_to_ex_proj_dir: Path, example_project_dir: Path, ): build_configured_base_repo(example_project_dir) # normalize expected to os specific newlines expected_changelog = str.join( os.linesep, [ *[ line.replace("\r", "") for line in EXAMPLE_CHANGELOG_MD_CONTENT.strip().split("\n") ], "", ], ) changelog_tpl = """{{ "%s" | read_file | trim }}%ls""".replace( "%s", str(changelog_md_file) ).replace("%ls", os.linesep) env = environment( newline_sequence="\n" if os.linesep == "\n" else "\r\n", trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True, autoescape=False, ) context = make_changelog_context( hvcs_client=Gitlab(example_git_https_url), release_history=artificial_release_history, mode=ChangelogMode.UPDATE, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ) context.bind_to_environment(env) # Create changelog from template with environment actual_changelog = env.from_string(changelog_tpl).render() # Evaluate assert expected_changelog.encode() == actual_changelog.encode() @pytest.mark.parametrize("file_path", ["", "nonexistent.md"]) def test_changelog_context_read_file_fails_gracefully( example_git_https_url: str, artificial_release_history: ReleaseHistory, changelog_md_file: Path, file_path: str, ): changelog_tpl = """{{ "%s" | read_file }}""".replace("%s", file_path) expected_changelog = "" env = environment(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True) context = make_changelog_context( hvcs_client=Gitlab(example_git_https_url), release_history=artificial_release_history, mode=ChangelogMode.UPDATE, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ) 
context.bind_to_environment(env) # Create changelog from template with environment actual_changelog = env.from_string(changelog_tpl).render() # Evaluate assert expected_changelog == actual_changelog def test_changelog_context_autofit_text_width( example_git_https_url: str, artificial_release_history: ReleaseHistory, changelog_md_file: Path, ): changelog_tpl = """{{ "This is a long line that should be autofitted" | autofit_text_width(20) }}""" expected_changelog = "This is a long line\nthat should be\nautofitted" env = environment(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True) context = make_changelog_context( hvcs_client=Gitlab(example_git_https_url), release_history=artificial_release_history, mode=ChangelogMode.UPDATE, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ) context.bind_to_environment(env) # Create changelog from template with environment actual_changelog = env.from_string(changelog_tpl).render() # Evaluate assert expected_changelog == actual_changelog def test_changelog_context_autofit_text_width_w_indent( example_git_https_url: str, artificial_release_history: ReleaseHistory, changelog_md_file: Path, ): changelog_tpl = """{{ "This is a long line that should be autofitted" | autofit_text_width(20, indent_size=2) }}""" expected_changelog = "This is a long line\n that should be\n autofitted" env = environment(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True) context = make_changelog_context( hvcs_client=Gitlab(example_git_https_url), release_history=artificial_release_history, mode=ChangelogMode.UPDATE, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ) context.bind_to_environment(env) # Create changelog from template with environment actual_changelog = env.from_string(changelog_tpl).render() # Evaluate assert expected_changelog == actual_changelog def test_changelog_context_sort_numerically( example_git_https_url: str, 
artificial_release_history: ReleaseHistory, changelog_md_file: Path, ): changelog_tpl = dedent( """\ {{ [ ".. _#5: link", ".. _PR#3: link", ".. _PR#10: link", ".. _#100: link" ] | sort_numerically | join("\\n") }} """ ) expected_changelog = dedent( """\ .. _#5: link .. _#100: link .. _PR#3: link .. _PR#10: link """ ) env = environment(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True) context = make_changelog_context( hvcs_client=Gitlab(example_git_https_url), release_history=artificial_release_history, mode=ChangelogMode.UPDATE, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ) context.bind_to_environment(env) # Create changelog from template with environment actual_changelog = env.from_string(changelog_tpl).render() # Evaluate assert expected_changelog == actual_changelog def test_changelog_context_sort_numerically_reverse( example_git_https_url: str, artificial_release_history: ReleaseHistory, changelog_md_file: Path, ): changelog_tpl = dedent( """\ {{ [ ".. _#5: link", ".. _PR#3: link", ".. _PR#10: link", ".. _#100: link" ] | sort_numerically(reverse=True) | join("\\n") }} """ ) expected_changelog = dedent( """\ .. _#100: link .. _#5: link .. _PR#10: link .. 
_PR#3: link """ ) env = environment(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True) context = make_changelog_context( hvcs_client=Gitlab(example_git_https_url), release_history=artificial_release_history, mode=ChangelogMode.UPDATE, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ) context.bind_to_environment(env) # Create changelog from template with environment actual_changelog = env.from_string(changelog_tpl).render() # Evaluate assert expected_changelog == actual_changelog def test_changelog_context_pypi_url_filter( example_git_https_url: str, artificial_release_history: ReleaseHistory, changelog_md_file: Path, ): changelog_tpl = dedent( """\ {{ "example-package" | create_pypi_url }} """ ) expected_changelog = dedent( """\ https://pypi.org/project/example-package """ ) env = environment(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True) context = make_changelog_context( hvcs_client=Gitlab(example_git_https_url), release_history=artificial_release_history, mode=ChangelogMode.UPDATE, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ) context.bind_to_environment(env) # Create changelog from template with environment actual_changelog = env.from_string(changelog_tpl).render() # Evaluate assert expected_changelog == actual_changelog def test_changelog_context_pypi_url_filter_tagged( example_git_https_url: str, artificial_release_history: ReleaseHistory, changelog_md_file: Path, ): version = "1.0.0" changelog_tpl = dedent( """\ {% set release = context.history.released.values() | first %}{{ "example-package" | create_pypi_url(release.version | string) }} """ ) expected_changelog = dedent( f"""\ https://pypi.org/project/example-package/{version} """ ) env = environment(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True) context = make_changelog_context( hvcs_client=Gitlab(example_git_https_url), release_history=artificial_release_history, 
mode=ChangelogMode.UPDATE, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ) context.bind_to_environment(env) # Create changelog from template with environment actual_changelog = env.from_string(changelog_tpl).render() # Evaluate assert expected_changelog == actual_changelog @pytest.mark.parametrize("hvcs_client_class", [Github, Gitlab, Gitea]) def test_changelog_context_release_url_filter( example_git_https_url: str, hvcs_client_class: type[Github | Gitlab | Gitea], artificial_release_history: ReleaseHistory, changelog_md_file: Path, ): version = list(artificial_release_history.released.keys())[-1] changelog_tpl = dedent( """\ {% set release = context.history.released.values() | first %}{{ "[%s](%s)" | format( release.version.as_tag(), release.version.as_tag() | create_release_url, ) }} """ ) with mock.patch.dict(os.environ, {}, clear=True): hvcs_client = hvcs_client_class(remote_url=example_git_https_url) expected_changelog = dedent( f"""\ [{version.as_tag()}]({hvcs_client.create_release_url(version.as_tag())}) """ ) env = environment(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True) context = make_changelog_context( hvcs_client=hvcs_client, release_history=artificial_release_history, mode=ChangelogMode.UPDATE, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ) context.bind_to_environment(env) # Create changelog from template with environment actual_changelog = env.from_string(changelog_tpl).render() # Evaluate assert expected_changelog == actual_changelog @pytest.mark.parametrize("hvcs_client_class", [Github, Gitlab, Gitea, Bitbucket]) def test_changelog_context_format_w_official_name_filter( example_git_https_url: str, hvcs_client_class: type[Github | Gitlab | Gitea], artificial_release_history: ReleaseHistory, changelog_md_file: Path, ): changelog_tpl = dedent( """\ {{ "%s" | format_w_official_vcs_name }} {{ "{}" | format_w_official_vcs_name }} {{ "{vcs_name}" | 
format_w_official_vcs_name }} """ ) with mock.patch.dict(os.environ, {}, clear=True): hvcs_client = hvcs_client_class(remote_url=example_git_https_url) expected_changelog = dedent( f"""\ {hvcs_client.OFFICIAL_NAME} {hvcs_client.OFFICIAL_NAME} {hvcs_client.OFFICIAL_NAME} """ ) env = environment(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True) context = make_changelog_context( hvcs_client=hvcs_client, release_history=artificial_release_history, mode=ChangelogMode.UPDATE, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ) context.bind_to_environment(env) # Create changelog from template with environment actual_changelog = env.from_string(changelog_tpl).render() # Evaluate assert expected_changelog == actual_changelog python-semantic-release-10.4.1/tests/unit/semantic_release/changelog/test_default_changelog.py000066400000000000000000001252411506116242600327640ustar00rootroot00000000000000from __future__ import annotations from pathlib import Path from typing import TYPE_CHECKING import pytest # NOTE: use backport with newer API from importlib_resources import files import semantic_release from semantic_release.changelog.context import ChangelogMode, make_changelog_context from semantic_release.cli.changelog_writer import render_default_changelog_file from semantic_release.cli.config import ChangelogOutputFormat from semantic_release.commit_parser import ParsedCommit from semantic_release.hvcs import Bitbucket, Gitea, Github, Gitlab if TYPE_CHECKING: from semantic_release.changelog.release_history import ReleaseHistory @pytest.fixture(scope="module") def default_changelog_template() -> str: """Retrieve the semantic-release default changelog template.""" version_notes_template = files(semantic_release.__name__).joinpath( Path("data", "templates", "conventional", "md", "CHANGELOG.md.j2") ) return version_notes_template.read_text(encoding="utf-8") @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, 
Bitbucket]) def test_default_changelog_template( hvcs_client: type[Bitbucket | Gitea | Github | Gitlab], example_git_https_url: str, artificial_release_history: ReleaseHistory, changelog_md_file: Path, today_date_str: str, ): artificial_release_history.unreleased = {} # Wipe out unreleased hvcs = hvcs_client(example_git_https_url) latest_version = next(iter(artificial_release_history.released.keys())) latest_release = artificial_release_history.released[latest_version] first_version = list(artificial_release_history.released.keys())[-1] feat_commit_obj = latest_release["elements"]["feature"][0] fix_commit_obj_1 = latest_release["elements"]["fix"][0] fix_commit_obj_2 = latest_release["elements"]["fix"][1] fix_commit_obj_3 = latest_release["elements"]["fix"][2] assert isinstance(feat_commit_obj, ParsedCommit) assert isinstance(fix_commit_obj_1, ParsedCommit) assert isinstance(fix_commit_obj_2, ParsedCommit) assert isinstance(fix_commit_obj_3, ParsedCommit) feat_commit_url = hvcs.commit_hash_url(feat_commit_obj.commit.hexsha) feat_description = str.join("\n", feat_commit_obj.descriptions) fix_commit_1_url = hvcs.commit_hash_url(fix_commit_obj_1.commit.hexsha) fix_commit_1_description = str.join("\n", fix_commit_obj_1.descriptions) fix_commit_2_url = hvcs.commit_hash_url(fix_commit_obj_2.commit.hexsha) fix_commit_2_description = str.join("\n", fix_commit_obj_2.descriptions) fix_commit_3_url = hvcs.commit_hash_url(fix_commit_obj_3.commit.hexsha) fix_commit_3_description = str.join("\n", fix_commit_obj_3.descriptions) expected_changelog = str.join( "\n", [ "# CHANGELOG", "", "", f"## v{latest_version} ({today_date_str})", "", "### Feature", "", # Due to the 100 character limit, hash url will be on the second line f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" ([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", "", "### Fix", "", # Commit 2 is first because it has no scope # Due to the 100 character limit, hash url will be on the second 
line f"- {fix_commit_2_description.capitalize()}", f" ([`{fix_commit_obj_2.commit.hexsha[:7]}`]({fix_commit_2_url}))", "", # Commit 3 is second because it starts with an A even though it has the same scope as 1 # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_3.scope}**: {fix_commit_3_description.capitalize()}", f" ([`{fix_commit_obj_3.commit.hexsha[:7]}`]({fix_commit_3_url}))", "", # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_1.scope}**: {fix_commit_1_description.capitalize()}", f" ([`{fix_commit_obj_1.commit.hexsha[:7]}`]({fix_commit_1_url}))", "", "", f"## v{first_version} ({today_date_str})", "", "- Initial Release", ], ) actual_changelog = render_default_changelog_file( output_format=ChangelogOutputFormat.MARKDOWN, changelog_context=make_changelog_context( hvcs_client=hvcs, release_history=artificial_release_history, mode=ChangelogMode.INIT, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=True, ), changelog_style="conventional", ) assert expected_changelog == actual_changelog @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_changelog_template_w_a_brk_change( hvcs_client: type[Bitbucket | Gitea | Github | Gitlab], example_git_https_url: str, release_history_w_brk_change: ReleaseHistory, changelog_md_file: Path, today_date_str: str, ): hvcs = hvcs_client(example_git_https_url) releases = iter(release_history_w_brk_change.released.keys()) latest_version = next(releases) latest_release = release_history_w_brk_change.released[latest_version] previous_version = next(releases) previous_release = release_history_w_brk_change.released[previous_version] first_version = list(release_history_w_brk_change.released.keys())[-1] brk_fix_commit_obj = latest_release["elements"]["Bug Fixes"][0] feat_commit_obj = previous_release["elements"]["feature"][0] fix_commit_obj_1 = previous_release["elements"]["fix"][0] 
fix_commit_obj_2 = previous_release["elements"]["fix"][1] fix_commit_obj_3 = previous_release["elements"]["fix"][2] assert isinstance(brk_fix_commit_obj, ParsedCommit) assert isinstance(feat_commit_obj, ParsedCommit) assert isinstance(fix_commit_obj_1, ParsedCommit) assert isinstance(fix_commit_obj_2, ParsedCommit) assert isinstance(fix_commit_obj_3, ParsedCommit) brk_fix_commit_url = hvcs.commit_hash_url(brk_fix_commit_obj.commit.hexsha) brk_fix_description = str.join("\n", brk_fix_commit_obj.descriptions) brk_fix_brking_description = str.join( "\n", brk_fix_commit_obj.breaking_descriptions ) feat_commit_url = hvcs.commit_hash_url(feat_commit_obj.commit.hexsha) feat_description = str.join("\n", feat_commit_obj.descriptions) fix_commit_1_url = hvcs.commit_hash_url(fix_commit_obj_1.commit.hexsha) fix_commit_1_description = str.join("\n", fix_commit_obj_1.descriptions) fix_commit_2_url = hvcs.commit_hash_url(fix_commit_obj_2.commit.hexsha) fix_commit_2_description = str.join("\n", fix_commit_obj_2.descriptions) fix_commit_3_url = hvcs.commit_hash_url(fix_commit_obj_3.commit.hexsha) fix_commit_3_description = str.join("\n", fix_commit_obj_3.descriptions) expected_changelog = str.join( "\n", [ "# CHANGELOG", "", "", f"## v{latest_version} ({today_date_str})", "", "### Bug Fixes", "", # Due to the 100 character limit, hash url will be on the second line f"- **{brk_fix_commit_obj.scope}**: {brk_fix_description.capitalize()}", f" ([`{brk_fix_commit_obj.commit.hexsha[:7]}`]({brk_fix_commit_url}))", "", "### Breaking Changes", "", # Currently does not consider the 100 character limit because the current # descriptions are short enough to fit in one line "- {commit_scope}{change_desc}".format( commit_scope=( f"**{brk_fix_commit_obj.scope}**: " if brk_fix_commit_obj.scope else "" ), change_desc=brk_fix_brking_description.capitalize(), ), "", "", f"## v{previous_version} ({today_date_str})", "", "### Feature", "", # Due to the 100 character limit, hash url will be on the 
second line f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" ([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", "", "### Fix", "", # Commit 2 is first because it has no scope # Due to the 100 character limit, hash url will be on the second line f"- {fix_commit_2_description.capitalize()}", f" ([`{fix_commit_obj_2.commit.hexsha[:7]}`]({fix_commit_2_url}))", "", # Commit 3 is second because it starts with an A even though it has the same scope as 1 # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_3.scope}**: {fix_commit_3_description.capitalize()}", f" ([`{fix_commit_obj_3.commit.hexsha[:7]}`]({fix_commit_3_url}))", "", # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_1.scope}**: {fix_commit_1_description.capitalize()}", f" ([`{fix_commit_obj_1.commit.hexsha[:7]}`]({fix_commit_1_url}))", "", "", f"## v{first_version} ({today_date_str})", "", "- Initial Release", ], ) actual_changelog = render_default_changelog_file( output_format=ChangelogOutputFormat.MARKDOWN, changelog_context=make_changelog_context( hvcs_client=hvcs, release_history=release_history_w_brk_change, mode=ChangelogMode.INIT, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=True, ), changelog_style="conventional", ) assert expected_changelog == actual_changelog @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_changelog_template_w_multiple_brk_changes( hvcs_client: type[Bitbucket | Gitea | Github | Gitlab], example_git_https_url: str, release_history_w_multiple_brk_changes: ReleaseHistory, changelog_md_file: Path, today_date_str: str, ): hvcs = hvcs_client(example_git_https_url) releases = iter(release_history_w_multiple_brk_changes.released.keys()) latest_version = next(releases) latest_release = release_history_w_multiple_brk_changes.released[latest_version] previous_version = next(releases) previous_release = 
release_history_w_multiple_brk_changes.released[previous_version] first_version = list(release_history_w_multiple_brk_changes.released.keys())[-1] brk_feat_commit_obj = latest_release["elements"]["Features"][0] brk_fix_commit_obj = latest_release["elements"]["Bug Fixes"][0] feat_commit_obj = previous_release["elements"]["feature"][0] fix_commit_obj_1 = previous_release["elements"]["fix"][0] fix_commit_obj_2 = previous_release["elements"]["fix"][1] fix_commit_obj_3 = previous_release["elements"]["fix"][2] assert isinstance(brk_feat_commit_obj, ParsedCommit) assert isinstance(brk_fix_commit_obj, ParsedCommit) assert isinstance(feat_commit_obj, ParsedCommit) assert isinstance(fix_commit_obj_1, ParsedCommit) assert isinstance(fix_commit_obj_2, ParsedCommit) assert isinstance(fix_commit_obj_3, ParsedCommit) brk_feat_commit_url = hvcs.commit_hash_url(brk_feat_commit_obj.commit.hexsha) brk_feat_description = str.join("\n", brk_feat_commit_obj.descriptions) brk_feat_brking_description = str.join( "\n", brk_feat_commit_obj.breaking_descriptions ) brk_fix_commit_url = hvcs.commit_hash_url(brk_fix_commit_obj.commit.hexsha) brk_fix_description = str.join("\n", brk_fix_commit_obj.descriptions) brk_fix_brking_description = str.join( "\n", brk_fix_commit_obj.breaking_descriptions ) feat_commit_url = hvcs.commit_hash_url(feat_commit_obj.commit.hexsha) feat_description = str.join("\n", feat_commit_obj.descriptions) fix_commit_1_url = hvcs.commit_hash_url(fix_commit_obj_1.commit.hexsha) fix_commit_1_description = str.join("\n", fix_commit_obj_1.descriptions) fix_commit_2_url = hvcs.commit_hash_url(fix_commit_obj_2.commit.hexsha) fix_commit_2_description = str.join("\n", fix_commit_obj_2.descriptions) fix_commit_3_url = hvcs.commit_hash_url(fix_commit_obj_3.commit.hexsha) fix_commit_3_description = str.join("\n", fix_commit_obj_3.descriptions) expected_changelog = str.join( "\n", [ "# CHANGELOG", "", "", f"## v{latest_version} ({today_date_str})", "", "### Bug Fixes", "", # Due to 
the 100 character limit, hash url will be on the second line f"- **{brk_fix_commit_obj.scope}**: {brk_fix_description.capitalize()}", f" ([`{brk_fix_commit_obj.commit.hexsha[:7]}`]({brk_fix_commit_url}))", "", "### Features", "", # Due to the 100 character limit, hash url will be on the second line f"- {brk_feat_description.capitalize()}", f" ([`{brk_feat_commit_obj.commit.hexsha[:7]}`]({brk_feat_commit_url}))", "", "### Breaking Changes", "", # Currently does not consider the 100 character limit because the current # descriptions are short enough to fit in one line "- {commit_scope}{change_desc}".format( commit_scope=( f"**{brk_feat_commit_obj.scope}**: " if brk_feat_commit_obj.scope else "" ), change_desc=brk_feat_brking_description.capitalize(), ), "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{brk_fix_commit_obj.scope}**: " if brk_fix_commit_obj.scope else "" ), change_desc=brk_fix_brking_description.capitalize(), ), "", "", f"## v{previous_version} ({today_date_str})", "", "### Feature", "", # Due to the 100 character limit, hash url will be on the second line f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" ([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", "", "### Fix", "", # Commit 2 is first because it has no scope # Due to the 100 character limit, hash url will be on the second line f"- {fix_commit_2_description.capitalize()}", f" ([`{fix_commit_obj_2.commit.hexsha[:7]}`]({fix_commit_2_url}))", "", # Commit 3 is second because it starts with an A even though it has the same scope as 1 # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_3.scope}**: {fix_commit_3_description.capitalize()}", f" ([`{fix_commit_obj_3.commit.hexsha[:7]}`]({fix_commit_3_url}))", "", # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_1.scope}**: {fix_commit_1_description.capitalize()}", f" 
([`{fix_commit_obj_1.commit.hexsha[:7]}`]({fix_commit_1_url}))", "", "", f"## v{first_version} ({today_date_str})", "", "- Initial Release", ], ) actual_changelog = render_default_changelog_file( output_format=ChangelogOutputFormat.MARKDOWN, changelog_context=make_changelog_context( hvcs_client=hvcs, release_history=release_history_w_multiple_brk_changes, mode=ChangelogMode.INIT, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=True, ), changelog_style="conventional", ) assert expected_changelog == actual_changelog @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_changelog_template_no_initial_release_mask( hvcs_client: type[Bitbucket | Gitea | Github | Gitlab], example_git_https_url: str, artificial_release_history: ReleaseHistory, changelog_md_file: Path, today_date_str: str, ): artificial_release_history.unreleased = {} # Wipe out unreleased hvcs = hvcs_client(example_git_https_url) latest_version = next(iter(artificial_release_history.released.keys())) latest_release = artificial_release_history.released[latest_version] first_version = list(artificial_release_history.released.keys())[-1] feat_commit_obj = latest_release["elements"]["feature"][0] fix_commit_obj_1 = latest_release["elements"]["fix"][0] fix_commit_obj_2 = latest_release["elements"]["fix"][1] fix_commit_obj_3 = latest_release["elements"]["fix"][2] assert isinstance(feat_commit_obj, ParsedCommit) assert isinstance(fix_commit_obj_1, ParsedCommit) assert isinstance(fix_commit_obj_2, ParsedCommit) assert isinstance(fix_commit_obj_3, ParsedCommit) feat_commit_url = hvcs.commit_hash_url(feat_commit_obj.commit.hexsha) feat_description = str.join("\n", feat_commit_obj.descriptions) fix_commit_1_url = hvcs.commit_hash_url(fix_commit_obj_1.commit.hexsha) fix_commit_1_description = str.join("\n", fix_commit_obj_1.descriptions) fix_commit_2_url = hvcs.commit_hash_url(fix_commit_obj_2.commit.hexsha) fix_commit_2_description = 
str.join("\n", fix_commit_obj_2.descriptions) fix_commit_3_url = hvcs.commit_hash_url(fix_commit_obj_3.commit.hexsha) fix_commit_3_description = str.join("\n", fix_commit_obj_3.descriptions) expected_changelog = str.join( "\n", [ "# CHANGELOG", "", "", f"## v{latest_version} ({today_date_str})", "", "### Feature", "", # Due to the 100 character limit, hash url will be on the second line f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" ([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", "", "### Fix", "", # Commit 2 is first because it has no scope # Due to the 100 character limit, hash url will be on the second line f"- {fix_commit_2_description.capitalize()}", f" ([`{fix_commit_obj_2.commit.hexsha[:7]}`]({fix_commit_2_url}))", "", # Commit 3 is second because it starts with an A even though it has the same scope as 1 # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_3.scope}**: {fix_commit_3_description.capitalize()}", f" ([`{fix_commit_obj_3.commit.hexsha[:7]}`]({fix_commit_3_url}))", "", # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_1.scope}**: {fix_commit_1_description.capitalize()}", f" ([`{fix_commit_obj_1.commit.hexsha[:7]}`]({fix_commit_1_url}))", "", "", f"## v{first_version} ({today_date_str})", "", "### Feature", "", # Due to the 100 character limit, hash url will be on the second line f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" ([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", ], ) actual_changelog = render_default_changelog_file( output_format=ChangelogOutputFormat.MARKDOWN, changelog_context=make_changelog_context( hvcs_client=hvcs, release_history=artificial_release_history, mode=ChangelogMode.INIT, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ), changelog_style="conventional", ) assert expected_changelog == actual_changelog 
@pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_changelog_template_w_unreleased_changes( hvcs_client: type[Bitbucket | Gitea | Github | Gitlab], example_git_https_url: str, artificial_release_history: ReleaseHistory, changelog_md_file: Path, today_date_str: str, ): hvcs = hvcs_client(example_git_https_url) latest_version = next(iter(artificial_release_history.released.keys())) latest_release = artificial_release_history.released[latest_version] first_version = list(artificial_release_history.released.keys())[-1] feat_commit_obj = latest_release["elements"]["feature"][0] fix_commit_obj_1 = latest_release["elements"]["fix"][0] fix_commit_obj_2 = latest_release["elements"]["fix"][1] fix_commit_obj_3 = latest_release["elements"]["fix"][2] assert isinstance(feat_commit_obj, ParsedCommit) assert isinstance(fix_commit_obj_1, ParsedCommit) assert isinstance(fix_commit_obj_2, ParsedCommit) assert isinstance(fix_commit_obj_3, ParsedCommit) feat_commit_url = hvcs.commit_hash_url(feat_commit_obj.commit.hexsha) feat_description = str.join("\n", feat_commit_obj.descriptions) fix_commit_1_url = hvcs.commit_hash_url(fix_commit_obj_1.commit.hexsha) fix_commit_1_description = str.join("\n", fix_commit_obj_1.descriptions) fix_commit_2_url = hvcs.commit_hash_url(fix_commit_obj_2.commit.hexsha) fix_commit_2_description = str.join("\n", fix_commit_obj_2.descriptions) fix_commit_3_url = hvcs.commit_hash_url(fix_commit_obj_3.commit.hexsha) fix_commit_3_description = str.join("\n", fix_commit_obj_3.descriptions) expected_changelog = str.join( "\n", [ "# CHANGELOG", "", "", "## Unreleased", "", "### Feature", "", f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" ([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", "", "", f"## v{latest_version} ({today_date_str})", "", "### Feature", "", # Due to the 100 character limit, hash url will be on the second line f"- **{feat_commit_obj.scope}**: 
{feat_description.capitalize()}", f" ([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", "", "### Fix", "", # Commit 2 is first because it has no scope # Due to the 100 character limit, hash url will be on the second line f"- {fix_commit_2_description.capitalize()}", f" ([`{fix_commit_obj_2.commit.hexsha[:7]}`]({fix_commit_2_url}))", "", # Commit 3 is second because it starts with an A even though it has the same scope as 1 # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_3.scope}**: {fix_commit_3_description.capitalize()}", f" ([`{fix_commit_obj_3.commit.hexsha[:7]}`]({fix_commit_3_url}))", "", # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_1.scope}**: {fix_commit_1_description.capitalize()}", f" ([`{fix_commit_obj_1.commit.hexsha[:7]}`]({fix_commit_1_url}))", "", "", f"## v{first_version} ({today_date_str})", "", "- Initial Release", ], ) actual_changelog = render_default_changelog_file( output_format=ChangelogOutputFormat.MARKDOWN, changelog_context=make_changelog_context( hvcs_client=hvcs, release_history=artificial_release_history, mode=ChangelogMode.INIT, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=True, ), changelog_style="conventional", ) assert expected_changelog == actual_changelog @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_changelog_template_w_a_notice( hvcs_client: type[Github | Gitlab | Gitea | Bitbucket], example_git_https_url: str, release_history_w_a_notice: ReleaseHistory, changelog_md_file: Path, today_date_str: str, ): hvcs = hvcs_client(example_git_https_url) released_versions = iter(release_history_w_a_notice.released.keys()) latest_version = next(released_versions) prev_version_1 = next(released_versions) prev_version_2 = next(released_versions) latest_release = release_history_w_a_notice.released[latest_version] prev_release_1 = 
release_history_w_a_notice.released[prev_version_1] notice_commit_obj = next(iter(latest_release["elements"].values()))[0] feat_commit_obj = prev_release_1["elements"]["feature"][0] fix_commit_obj_1 = prev_release_1["elements"]["fix"][0] fix_commit_obj_2 = prev_release_1["elements"]["fix"][1] fix_commit_obj_3 = prev_release_1["elements"]["fix"][2] assert isinstance(notice_commit_obj, ParsedCommit) assert isinstance(feat_commit_obj, ParsedCommit) assert isinstance(fix_commit_obj_1, ParsedCommit) assert isinstance(fix_commit_obj_2, ParsedCommit) assert isinstance(fix_commit_obj_3, ParsedCommit) notice_commit_url = hvcs.commit_hash_url(notice_commit_obj.commit.hexsha) notice_commit_description = str.join("\n", notice_commit_obj.descriptions) notice_description = str.join("\n", notice_commit_obj.release_notices) feat_commit_url = hvcs.commit_hash_url(feat_commit_obj.commit.hexsha) feat_description = str.join("\n", feat_commit_obj.descriptions) fix_commit_1_url = hvcs.commit_hash_url(fix_commit_obj_1.commit.hexsha) fix_commit_1_description = str.join("\n", fix_commit_obj_1.descriptions) fix_commit_2_url = hvcs.commit_hash_url(fix_commit_obj_2.commit.hexsha) fix_commit_2_description = str.join("\n", fix_commit_obj_2.descriptions) fix_commit_3_url = hvcs.commit_hash_url(fix_commit_obj_3.commit.hexsha) fix_commit_3_description = str.join("\n", fix_commit_obj_3.descriptions) expected_changelog = str.join( "\n", [ "# CHANGELOG", "", "", f"## v{latest_version} ({today_date_str})", "", "### Refactoring", "", # Due to the 100 character limit, hash url will be on the second line f"- **{notice_commit_obj.scope}**: {notice_commit_description.capitalize().rstrip()}", f" ([`{notice_commit_obj.commit.hexsha[:7]}`]({notice_commit_url}))", "", "### Additional Release Information", "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{notice_commit_obj.scope}**: " if notice_commit_obj.scope else "" ), change_desc=notice_description.capitalize().rstrip(), ), "", "", f"## 
v{prev_version_1} ({today_date_str})", "", "### Feature", "", # Due to the 100 character limit, hash url will be on the second line f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" ([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", "", "### Fix", "", # Commit 2 is first because it has no scope # Due to the 100 character limit, hash url will be on the second line f"- {fix_commit_2_description.capitalize()}", f" ([`{fix_commit_obj_2.commit.hexsha[:7]}`]({fix_commit_2_url}))", "", # Commit 3 is second because it starts with an A even though it has the same scope as 1 # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_3.scope}**: {fix_commit_3_description.capitalize()}", f" ([`{fix_commit_obj_3.commit.hexsha[:7]}`]({fix_commit_3_url}))", "", # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_1.scope}**: {fix_commit_1_description.capitalize()}", f" ([`{fix_commit_obj_1.commit.hexsha[:7]}`]({fix_commit_1_url}))", "", "", f"## v{prev_version_2} ({today_date_str})", "", "### Feature", "", # Due to the 100 character limit, hash url will be on the second line f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" ([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", ], ) actual_changelog = render_default_changelog_file( output_format=ChangelogOutputFormat.MARKDOWN, changelog_context=make_changelog_context( hvcs_client=hvcs, release_history=release_history_w_a_notice, mode=ChangelogMode.INIT, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ), changelog_style="conventional", ) assert expected_changelog == actual_changelog @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_changelog_template_w_a_notice_n_brk_change( hvcs_client: type[Github | Gitlab | Gitea | Bitbucket], example_git_https_url: str, release_history_w_notice_n_brk_change: ReleaseHistory, 
changelog_md_file: Path, today_date_str: str, ): hvcs = hvcs_client(example_git_https_url) released_versions = iter(release_history_w_notice_n_brk_change.released.keys()) latest_version = next(released_versions) prev_version_1 = next(released_versions) prev_version_2 = next(released_versions) latest_release = release_history_w_notice_n_brk_change.released[latest_version] prev_release_1 = release_history_w_notice_n_brk_change.released[prev_version_1] brk_fix_commit_obj = latest_release["elements"]["Bug Fixes"][0] notice_commit_obj = latest_release["elements"]["Refactoring"][0] feat_commit_obj = prev_release_1["elements"]["feature"][0] fix_commit_obj_1 = prev_release_1["elements"]["fix"][0] fix_commit_obj_2 = prev_release_1["elements"]["fix"][1] fix_commit_obj_3 = prev_release_1["elements"]["fix"][2] assert isinstance(brk_fix_commit_obj, ParsedCommit) assert isinstance(notice_commit_obj, ParsedCommit) assert isinstance(feat_commit_obj, ParsedCommit) assert isinstance(fix_commit_obj_1, ParsedCommit) assert isinstance(fix_commit_obj_2, ParsedCommit) assert isinstance(fix_commit_obj_3, ParsedCommit) brk_fix_commit_url = hvcs.commit_hash_url(brk_fix_commit_obj.commit.hexsha) brk_fix_description = str.join("\n", brk_fix_commit_obj.descriptions) brk_fix_brking_description = str.join( "\n", brk_fix_commit_obj.breaking_descriptions ) notice_commit_url = hvcs.commit_hash_url(notice_commit_obj.commit.hexsha) notice_commit_description = str.join("\n", notice_commit_obj.descriptions) notice_description = str.join("\n", notice_commit_obj.release_notices) feat_commit_url = hvcs.commit_hash_url(feat_commit_obj.commit.hexsha) feat_description = str.join("\n", feat_commit_obj.descriptions) fix_commit_1_url = hvcs.commit_hash_url(fix_commit_obj_1.commit.hexsha) fix_commit_1_description = str.join("\n", fix_commit_obj_1.descriptions) fix_commit_2_url = hvcs.commit_hash_url(fix_commit_obj_2.commit.hexsha) fix_commit_2_description = str.join("\n", fix_commit_obj_2.descriptions) 
fix_commit_3_url = hvcs.commit_hash_url(fix_commit_obj_3.commit.hexsha) fix_commit_3_description = str.join("\n", fix_commit_obj_3.descriptions) expected_changelog = str.join( "\n", [ "# CHANGELOG", "", "", f"## v{latest_version} ({today_date_str})", "", "### Bug Fixes", "", "- {commit_scope}{commit_desc}".format( commit_scope=( f"**{brk_fix_commit_obj.scope}**: " if brk_fix_commit_obj.scope else "" ), commit_desc=brk_fix_description.capitalize().rstrip(), ), f" ([`{brk_fix_commit_obj.commit.hexsha[:7]}`]({brk_fix_commit_url}))", "", "### Refactoring", "", "- {commit_scope}{commit_desc}".format( commit_scope=( f"**{notice_commit_obj.scope}**: " if notice_commit_obj.scope else "" ), commit_desc=notice_commit_description.capitalize().rstrip(), ), f" ([`{notice_commit_obj.commit.hexsha[:7]}`]({notice_commit_url}))", "", "### Breaking Changes", "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{brk_fix_commit_obj.scope}**: " if brk_fix_commit_obj.scope else "" ), change_desc=brk_fix_brking_description.capitalize().rstrip(), ), "", "### Additional Release Information", "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{notice_commit_obj.scope}**: " if notice_commit_obj.scope else "" ), change_desc=notice_description.capitalize().rstrip(), ), "", "", f"## v{prev_version_1} ({today_date_str})", "", "### Feature", "", # Due to the 100 character limit, hash url will be on the second line f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" ([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", "", "### Fix", "", # Commit 2 is first because it has no scope # Due to the 100 character limit, hash url will be on the second line f"- {fix_commit_2_description.capitalize()}", f" ([`{fix_commit_obj_2.commit.hexsha[:7]}`]({fix_commit_2_url}))", "", # Commit 3 is second because it starts with an A even though it has the same scope as 1 # Due to the 100 character limit, hash url will be on the second line f"- 
**{fix_commit_obj_3.scope}**: {fix_commit_3_description.capitalize()}", f" ([`{fix_commit_obj_3.commit.hexsha[:7]}`]({fix_commit_3_url}))", "", # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_1.scope}**: {fix_commit_1_description.capitalize()}", f" ([`{fix_commit_obj_1.commit.hexsha[:7]}`]({fix_commit_1_url}))", "", "", f"## v{prev_version_2} ({today_date_str})", "", "### Feature", "", # Due to the 100 character limit, hash url will be on the second line f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" ([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", ], ) actual_changelog = render_default_changelog_file( output_format=ChangelogOutputFormat.MARKDOWN, changelog_context=make_changelog_context( hvcs_client=hvcs, release_history=release_history_w_notice_n_brk_change, mode=ChangelogMode.INIT, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ), changelog_style="conventional", ) assert expected_changelog == actual_changelog @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_changelog_template_w_multiple_notices( hvcs_client: type[Github | Gitlab | Gitea | Bitbucket], example_git_https_url: str, release_history_w_multiple_notices: ReleaseHistory, changelog_md_file: Path, today_date_str: str, ): hvcs = hvcs_client(example_git_https_url) released_versions = iter(release_history_w_multiple_notices.released.keys()) latest_version = next(released_versions) prev_version_1 = next(released_versions) prev_version_2 = next(released_versions) latest_release = release_history_w_multiple_notices.released[latest_version] prev_release_1 = release_history_w_multiple_notices.released[prev_version_1] feat_notice_commit_obj = latest_release["elements"]["Features"][0] refactor_notice_commit_obj = latest_release["elements"]["Refactoring"][0] feat_commit_obj = prev_release_1["elements"]["feature"][0] fix_commit_obj_1 = 
prev_release_1["elements"]["fix"][0] fix_commit_obj_2 = prev_release_1["elements"]["fix"][1] fix_commit_obj_3 = prev_release_1["elements"]["fix"][2] assert isinstance(feat_notice_commit_obj, ParsedCommit) assert isinstance(refactor_notice_commit_obj, ParsedCommit) assert isinstance(feat_commit_obj, ParsedCommit) assert isinstance(fix_commit_obj_1, ParsedCommit) assert isinstance(fix_commit_obj_2, ParsedCommit) assert isinstance(fix_commit_obj_3, ParsedCommit) refactor_commit_url = hvcs.commit_hash_url(refactor_notice_commit_obj.commit.hexsha) refactor_commit_desc = str.join("\n", refactor_notice_commit_obj.descriptions) refactor_commit_notice_desc = str.join( "\n", refactor_notice_commit_obj.release_notices ) feat_notice_commit_url = hvcs.commit_hash_url(feat_notice_commit_obj.commit.hexsha) feat_notice_description = str.join("\n", feat_notice_commit_obj.descriptions) feat_commit_notice_desc = str.join("\n", feat_notice_commit_obj.release_notices) feat_commit_url = hvcs.commit_hash_url(feat_commit_obj.commit.hexsha) feat_description = str.join("\n", feat_commit_obj.descriptions) fix_commit_1_url = hvcs.commit_hash_url(fix_commit_obj_1.commit.hexsha) fix_commit_1_description = str.join("\n", fix_commit_obj_1.descriptions) fix_commit_2_url = hvcs.commit_hash_url(fix_commit_obj_2.commit.hexsha) fix_commit_2_description = str.join("\n", fix_commit_obj_2.descriptions) fix_commit_3_url = hvcs.commit_hash_url(fix_commit_obj_3.commit.hexsha) fix_commit_3_description = str.join("\n", fix_commit_obj_3.descriptions) expected_changelog = str.join( "\n", [ "# CHANGELOG", "", "", f"## v{latest_version} ({today_date_str})", "", "### Features", "", "- {commit_scope}{commit_desc}".format( commit_scope=( f"**{feat_notice_commit_obj.scope}**: " if feat_notice_commit_obj.scope else "" ), commit_desc=feat_notice_description.capitalize().rstrip(), ), f" ([`{feat_notice_commit_obj.commit.hexsha[:7]}`]({feat_notice_commit_url}))", "", "### Refactoring", "", "- 
{commit_scope}{commit_desc}".format( commit_scope=( f"**{refactor_notice_commit_obj.scope}**: " if refactor_notice_commit_obj.scope else "" ), commit_desc=refactor_commit_desc.capitalize().rstrip(), ), f" ([`{refactor_notice_commit_obj.commit.hexsha[:7]}`]({refactor_commit_url}))", "", "### Additional Release Information", "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{refactor_notice_commit_obj.scope}**: " if refactor_notice_commit_obj.scope else "" ), change_desc=refactor_commit_notice_desc.capitalize().rstrip(), ), "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{feat_notice_commit_obj.scope}**: " if feat_notice_commit_obj.scope else "" ), change_desc=str.join( "\n", [ feat_commit_notice_desc.capitalize()[:73].rstrip(), " " + feat_commit_notice_desc[73:].strip(), ], ), ), "", "", f"## v{prev_version_1} ({today_date_str})", "", "### Feature", "", # Due to the 100 character limit, hash url will be on the second line f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" ([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", "", "### Fix", "", # Commit 2 is first because it has no scope # Due to the 100 character limit, hash url will be on the second line f"- {fix_commit_2_description.capitalize()}", f" ([`{fix_commit_obj_2.commit.hexsha[:7]}`]({fix_commit_2_url}))", "", # Commit 3 is second because it starts with an A even though it has the same scope as 1 # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_3.scope}**: {fix_commit_3_description.capitalize()}", f" ([`{fix_commit_obj_3.commit.hexsha[:7]}`]({fix_commit_3_url}))", "", # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_1.scope}**: {fix_commit_1_description.capitalize()}", f" ([`{fix_commit_obj_1.commit.hexsha[:7]}`]({fix_commit_1_url}))", "", "", f"## v{prev_version_2} ({today_date_str})", "", "### Feature", "", # Due to the 100 character limit, hash url will be on the 
second line f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" ([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", ], ) actual_changelog = render_default_changelog_file( output_format=ChangelogOutputFormat.MARKDOWN, changelog_context=make_changelog_context( hvcs_client=hvcs, release_history=release_history_w_multiple_notices, mode=ChangelogMode.INIT, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ), changelog_style="conventional", ) assert expected_changelog == actual_changelog python-semantic-release-10.4.1/tests/unit/semantic_release/changelog/test_release_history.py000066400000000000000000000250451506116242600325330ustar00rootroot00000000000000from __future__ import annotations from datetime import datetime from typing import TYPE_CHECKING, NamedTuple import pytest from git import Actor from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.changelog.release_history import ReleaseHistory from semantic_release.version.translator import VersionTranslator from semantic_release.version.version import Version from tests.const import COMMIT_MESSAGE, CONVENTIONAL_COMMITS_MINOR from tests.fixtures import ( repo_w_git_flow_w_alpha_prereleases_n_conventional_commits, repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits, repo_w_github_flow_w_feature_release_channel_conventional_commits, repo_w_no_tags_conventional_commits, repo_w_trunk_only_conventional_commits, repo_w_trunk_only_n_prereleases_conventional_commits, ) from tests.util import add_text_to_file if TYPE_CHECKING: from typing import Protocol from semantic_release.commit_parser.conventional import ConventionalCommitParser from tests.fixtures.git_repo import ( BuiltRepoResult, GetCommitsFromRepoBuildDefFn, RepoDefinition, ) class CreateReleaseHistoryFromRepoDefFn(Protocol): def __call__(self, repo_def: RepoDefinition) -> FakeReleaseHistoryElements: ... 
# NOTE: not testing parser correctness here, just that the right commits end up # in the right places. So we only compare that the commits with the messages # we anticipate are in the right place, rather than by hash # So we are only using the conventional parser # We are also currently only testing that the "elements" key of the releases # is correct, i.e. the commits are in the right place - the other fields # will need special attention of their own later class FakeReleaseHistoryElements(NamedTuple): """ A fake release history structure that abstracts away the Parser-specific logic and only focuses that the commit messages are in the correct order and place. Where generally a ParsedCommit object exists, here we just use the actual `commit.message`. """ unreleased: dict[str, list[str]] released: dict[Version, dict[str, list[str]]] @pytest.fixture(scope="session") def create_release_history_from_repo_def() -> CreateReleaseHistoryFromRepoDefFn: def _create_release_history_from_repo_def( repo_def: RepoDefinition, ) -> FakeReleaseHistoryElements: # Organize the commits into the expected structure unreleased_history = {} released_history = {} for version_str, version_def in repo_def.items(): commits_per_group: dict[str, list] = { "Unknown": [], } for commit in version_def["commits"]: if commit["category"] not in commits_per_group: commits_per_group[commit["category"]] = [] commits_per_group[commit["category"]].append(commit["msg"].strip()) if version_str == "Unreleased": unreleased_history = commits_per_group continue # handle released versions version = Version.parse(version_str) # add the PSR version commit message commits_per_group["Unknown"].append( COMMIT_MESSAGE.format(version=version).strip() ) # store the organized commits for this version released_history[version] = commits_per_group return FakeReleaseHistoryElements( unreleased=unreleased_history, released=released_history, ) return _create_release_history_from_repo_def @pytest.mark.parametrize( 
"repo_result", [ # CONVENTIONAL parser lazy_fixture(repo_w_no_tags_conventional_commits.__name__), *[ pytest.param( lazy_fixture(repo_fixture_name), marks=pytest.mark.comprehensive, ) for repo_fixture_name in [ repo_w_trunk_only_conventional_commits.__name__, repo_w_trunk_only_n_prereleases_conventional_commits.__name__, # This is not tested because currently unable to disern the commits that were squashed or not # repo_w_github_flow_w_default_release_channel_conventional_commits.__name__, repo_w_github_flow_w_feature_release_channel_conventional_commits.__name__, repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__, repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits.__name__, ] ], ], ) @pytest.mark.order("last") def test_release_history( repo_result: BuiltRepoResult, default_conventional_parser: ConventionalCommitParser, file_in_repo: str, create_release_history_from_repo_def: CreateReleaseHistoryFromRepoDefFn, get_commits_from_repo_build_def: GetCommitsFromRepoBuildDefFn, ): repo = repo_result["repo"] expected_release_history = create_release_history_from_repo_def( get_commits_from_repo_build_def( repo_result["definition"], ignore_merge_commits=default_conventional_parser.options.ignore_merge_commits, ) ) expected_released_versions = sorted( map(str, expected_release_history.released.keys()) ) translator = VersionTranslator() # Nothing has unreleased commits currently history = ReleaseHistory.from_git_history( repo, translator, default_conventional_parser, # type: ignore[arg-type] ) released = history.released actual_released_versions = sorted(map(str, released.keys())) assert expected_released_versions == actual_released_versions for k in expected_release_history.released: expected = expected_release_history.released[k] expected_released_messages = str.join( "\n---\n", sorted([msg for bucket in expected.values() for msg in bucket]) ) actual = released[k]["elements"] actual_released_messages = str.join( "\n---\n", sorted( [ 
str(res.commit.message) for results in actual.values() for res in results ] ), ) assert expected_released_messages == actual_released_messages # PART 2: add some commits to the repo and check that they are in the right place for commit_message in CONVENTIONAL_COMMITS_MINOR: add_text_to_file(repo, file_in_repo) repo.git.commit(m=commit_message) expected_unreleased_messages = str.join( "\n---\n", sorted( [ str(msg).strip() for bucket in [ CONVENTIONAL_COMMITS_MINOR[::-1], *expected_release_history.unreleased.values(), ] for msg in bucket ] ), ) # Now we should have some unreleased commits, and nothing new released new_history = ReleaseHistory.from_git_history( repo, translator, default_conventional_parser, # type: ignore[arg-type] ) new_unreleased = new_history.unreleased new_released = new_history.released actual_unreleased_messages = str.join( "\n---\n", sorted( [ str(res.commit.message) for results in new_unreleased.values() for res in results ] ), ) assert expected_unreleased_messages == actual_unreleased_messages assert ( new_released == released ), "something that shouldn't be considered release has been released" @pytest.mark.parametrize( "repo_result", [ lazy_fixture(repo_w_no_tags_conventional_commits.__name__), *[ pytest.param( lazy_fixture(repo_fixture_name), marks=pytest.mark.comprehensive, ) for repo_fixture_name in [ repo_w_trunk_only_conventional_commits.__name__, repo_w_trunk_only_n_prereleases_conventional_commits.__name__, repo_w_github_flow_w_feature_release_channel_conventional_commits.__name__, repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__, repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits.__name__, ] ], ], ) @pytest.mark.order("last") def test_release_history_releases( repo_result: BuiltRepoResult, default_conventional_parser: ConventionalCommitParser ): new_version = Version.parse("100.10.1") actor = Actor("semantic-release", "semantic-release") release_history = ReleaseHistory.from_git_history( 
repo=repo_result["repo"], translator=VersionTranslator(), commit_parser=default_conventional_parser, # type: ignore[arg-type] ) tagged_date = datetime.now() new_rh = release_history.release( new_version, committer=actor, tagger=actor, tagged_date=tagged_date, ) assert new_rh is not release_history assert new_rh.unreleased == {} assert new_rh.released == { new_version: { "tagger": actor, "committer": actor, "tagged_date": tagged_date, "elements": release_history.unreleased, "version": new_version, }, **release_history.released, } @pytest.mark.parametrize( "repo_result", [ lazy_fixture(repo_w_no_tags_conventional_commits.__name__), *[ pytest.param( lazy_fixture(repo_fixture_name), marks=pytest.mark.comprehensive, ) for repo_fixture_name in [ repo_w_trunk_only_conventional_commits.__name__, repo_w_trunk_only_n_prereleases_conventional_commits.__name__, repo_w_github_flow_w_feature_release_channel_conventional_commits.__name__, repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__, repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits.__name__, ] ], ], ) @pytest.mark.order("last") def test_all_matching_repo_tags_are_released( repo_result: BuiltRepoResult, default_conventional_parser: ConventionalCommitParser ): repo = repo_result["repo"] translator = VersionTranslator() release_history = ReleaseHistory.from_git_history( repo=repo, translator=translator, commit_parser=default_conventional_parser, # type: ignore[arg-type] ) for tag in repo.tags: assert translator.from_tag(tag.name) in release_history.released python-semantic-release-10.4.1/tests/unit/semantic_release/changelog/test_release_notes.py000066400000000000000000001076111506116242600321620ustar00rootroot00000000000000from __future__ import annotations import os from pathlib import Path from textwrap import dedent from typing import TYPE_CHECKING from unittest import mock import pytest # NOTE: use backport with newer API to support 3.7 from importlib_resources import files import 
semantic_release from semantic_release.cli.changelog_writer import generate_release_notes from semantic_release.commit_parser.token import ParsedCommit from semantic_release.hvcs import Bitbucket, Gitea, Github, Gitlab if TYPE_CHECKING: from semantic_release.changelog.release_history import ReleaseHistory from tests.fixtures.example_project import ExProjectDir @pytest.fixture(scope="module") def release_notes_template() -> str: """Retrieve the semantic-release default release notes template.""" version_notes_template = files(semantic_release.__name__).joinpath( Path("data", "templates", "conventional", "md", ".release_notes.md.j2") ) return version_notes_template.read_text(encoding="utf-8") @pytest.mark.parametrize("mask_initial_release", [True, False]) @pytest.mark.parametrize("license_name", ["", "MIT"]) @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_release_notes_template( example_git_https_url: str, hvcs_client: type[Github | Gitlab | Gitea | Bitbucket], license_name: str, artificial_release_history: ReleaseHistory, mask_initial_release: bool, today_date_str: str, ): """ Unit test goal: just make sure it renders the release notes template without error. Scenarios are better suited for all the variations (commit types). 
""" released_versions = iter(artificial_release_history.released.keys()) version = next(released_versions) prev_version = next(released_versions) hvcs = hvcs_client(example_git_https_url) release = artificial_release_history.released[version] feat_commit_obj = release["elements"]["feature"][0] fix_commit_obj_1 = release["elements"]["fix"][0] fix_commit_obj_2 = release["elements"]["fix"][1] fix_commit_obj_3 = release["elements"]["fix"][2] assert isinstance(feat_commit_obj, ParsedCommit) assert isinstance(fix_commit_obj_1, ParsedCommit) assert isinstance(fix_commit_obj_2, ParsedCommit) assert isinstance(fix_commit_obj_3, ParsedCommit) feat_commit_url = hvcs.commit_hash_url(feat_commit_obj.commit.hexsha) feat_description = str.join("\n", feat_commit_obj.descriptions) fix_commit_1_url = hvcs.commit_hash_url(fix_commit_obj_1.commit.hexsha) fix_commit_1_description = str.join("\n", fix_commit_obj_1.descriptions) fix_commit_2_url = hvcs.commit_hash_url(fix_commit_obj_2.commit.hexsha) fix_commit_2_description = str.join("\n", fix_commit_obj_2.descriptions) fix_commit_3_url = hvcs.commit_hash_url(fix_commit_obj_3.commit.hexsha) fix_commit_3_description = str.join("\n", fix_commit_obj_3.descriptions) expected_content = str.join( os.linesep, [ f"## v{version} ({today_date_str})", *( [""] if not license_name else [ "", f"_This release is published under the {license_name} License._", "", ] ), "### Feature", "", "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{feat_commit_obj.scope}**: " if feat_commit_obj.scope else "" ), commit_desc=feat_description.capitalize(), short_hash=feat_commit_obj.commit.hexsha[:7], url=feat_commit_url, ), "", "### Fix", "", # Commit 2 is first because it has no scope "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{fix_commit_obj_2.scope}**: " if fix_commit_obj_2.scope else "" ), commit_desc=fix_commit_2_description.capitalize(), short_hash=fix_commit_obj_2.commit.hexsha[:7], 
url=fix_commit_2_url, ), "", # Commit 3 is second because it starts with an A even though it has the same scope as 1 "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{fix_commit_obj_3.scope}**: " if fix_commit_obj_3.scope else "" ), commit_desc=fix_commit_3_description.capitalize(), short_hash=fix_commit_obj_3.commit.hexsha[:7], url=fix_commit_3_url, ), "", # Commit 1 is last "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{fix_commit_obj_1.scope}**: " if fix_commit_obj_1.scope else "" ), commit_desc=fix_commit_1_description.capitalize(), short_hash=fix_commit_obj_1.commit.hexsha[:7], url=fix_commit_1_url, ), "", ], ) if not isinstance(hvcs, Gitea): expected_content += str.join( os.linesep, [ "", "---", "", "**Detailed Changes**: [{prev_version}...{new_version}]({version_compare_url})".format( prev_version=prev_version.as_tag(), new_version=version.as_tag(), version_compare_url=hvcs.compare_url( prev_version.as_tag(), version.as_tag() ), ), "", ], ) actual_content = generate_release_notes( hvcs_client=hvcs_client(remote_url=example_git_https_url), release=release, template_dir=Path(""), history=artificial_release_history, style="conventional", mask_initial_release=mask_initial_release, license_name=license_name, ) assert expected_content == actual_content @pytest.mark.parametrize("mask_initial_release", [True, False]) @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_release_notes_template_w_a_brk_description( example_git_https_url: str, hvcs_client: type[Github | Gitlab | Gitea | Bitbucket], release_history_w_brk_change: ReleaseHistory, mask_initial_release: bool, today_date_str: str, ): """ Unit test goal: just make sure it renders the release notes template without error. Scenarios are better suited for all the variations (commit types). 
""" released_versions = iter(release_history_w_brk_change.released.keys()) version = next(released_versions) prev_version = next(released_versions) hvcs = hvcs_client(example_git_https_url) release = release_history_w_brk_change.released[version] brk_fix_commit_obj = next(iter(release["elements"].values()))[0] assert isinstance(brk_fix_commit_obj, ParsedCommit) brk_fix_commit_url = hvcs.commit_hash_url(brk_fix_commit_obj.commit.hexsha) brk_fix_description = str.join("\n", brk_fix_commit_obj.descriptions) brk_fix_brking_description = str.join( "\n", brk_fix_commit_obj.breaking_descriptions ) expected_content = str.join( os.linesep, [ f"## v{version} ({today_date_str})", "", "### Bug Fixes", "", "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{brk_fix_commit_obj.scope}**: " if brk_fix_commit_obj.scope else "" ), commit_desc=brk_fix_description.capitalize(), short_hash=brk_fix_commit_obj.commit.hexsha[:7], url=brk_fix_commit_url, ), "", "### Breaking Changes", "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{brk_fix_commit_obj.scope}**: " if brk_fix_commit_obj.scope else "" ), change_desc=brk_fix_brking_description.capitalize(), ), "", ], ) if not isinstance(hvcs, Gitea): expected_content += str.join( os.linesep, [ "", "---", "", "**Detailed Changes**: [{prev_version}...{new_version}]({version_compare_url})".format( prev_version=prev_version.as_tag(), new_version=version.as_tag(), version_compare_url=hvcs.compare_url( prev_version.as_tag(), version.as_tag() ), ), "", ], ) actual_content = generate_release_notes( hvcs_client=hvcs_client(remote_url=example_git_https_url), release=release, template_dir=Path(""), history=release_history_w_brk_change, style="conventional", mask_initial_release=mask_initial_release, ) assert expected_content == actual_content @pytest.mark.parametrize("mask_initial_release", [True, False]) @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def 
test_default_release_notes_template_w_multiple_brk_changes( example_git_https_url: str, hvcs_client: type[Github | Gitlab | Gitea | Bitbucket], release_history_w_multiple_brk_changes: ReleaseHistory, mask_initial_release: bool, today_date_str: str, ): """ Unit test goal: just make sure it renders the release notes template without error. Scenarios are better suited for all the variations (commit types). """ released_versions = iter(release_history_w_multiple_brk_changes.released.keys()) version = next(released_versions) prev_version = next(released_versions) hvcs = hvcs_client(example_git_https_url) release = release_history_w_multiple_brk_changes.released[version] brk_fix_commit_obj = release["elements"]["Bug Fixes"][0] brk_feat_commit_obj = release["elements"]["Features"][0] assert isinstance(brk_fix_commit_obj, ParsedCommit) assert isinstance(brk_feat_commit_obj, ParsedCommit) brk_fix_commit_url = hvcs.commit_hash_url(brk_fix_commit_obj.commit.hexsha) brk_fix_description = str.join("\n", brk_fix_commit_obj.descriptions) brk_fix_brking_description = str.join( "\n", brk_fix_commit_obj.breaking_descriptions ) brk_feat_commit_url = hvcs.commit_hash_url(brk_feat_commit_obj.commit.hexsha) brk_feat_description = str.join("\n", brk_feat_commit_obj.descriptions) brk_feat_brking_description = str.join( "\n", brk_feat_commit_obj.breaking_descriptions ) expected_content = str.join( os.linesep, [ f"## v{version} ({today_date_str})", "", "### Bug Fixes", "", "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{brk_fix_commit_obj.scope}**: " if brk_fix_commit_obj.scope else "" ), commit_desc=brk_fix_description.capitalize(), short_hash=brk_fix_commit_obj.commit.hexsha[:7], url=brk_fix_commit_url, ), "", "### Features", "", "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{brk_feat_commit_obj.scope}**: " if brk_feat_commit_obj.scope else "" ), commit_desc=brk_feat_description.capitalize(), 
short_hash=brk_feat_commit_obj.commit.hexsha[:7], url=brk_feat_commit_url, ), "", "### Breaking Changes", "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{brk_feat_commit_obj.scope}**: " if brk_feat_commit_obj.scope else "" ), change_desc=brk_feat_brking_description.capitalize(), ), "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{brk_fix_commit_obj.scope}**: " if brk_fix_commit_obj.scope else "" ), change_desc=brk_fix_brking_description.capitalize(), ), "", ], ) if not isinstance(hvcs, Gitea): expected_content += str.join( os.linesep, [ "", "---", "", "**Detailed Changes**: [{prev_version}...{new_version}]({version_compare_url})".format( prev_version=prev_version.as_tag(), new_version=version.as_tag(), version_compare_url=hvcs.compare_url( prev_version.as_tag(), version.as_tag() ), ), "", ], ) actual_content = generate_release_notes( hvcs_client=hvcs_client(remote_url=example_git_https_url), release=release, template_dir=Path(""), history=release_history_w_multiple_brk_changes, style="conventional", mask_initial_release=mask_initial_release, ) assert expected_content == actual_content @pytest.mark.parametrize("license_name", ["", "MIT"]) @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_release_notes_template_first_release_masked( example_git_https_url: str, hvcs_client: type[Bitbucket | Gitea | Github | Gitlab], license_name: str, single_release_history: ReleaseHistory, today_date_str: str, ): """ Unit test goal: just make sure it renders the release notes template without error. Scenarios are better suited for all the variations (commit types). 
""" hvcs = hvcs_client(example_git_https_url) version = list(single_release_history.released.keys())[-1] release = single_release_history.released[version] expected_content = str.join( os.linesep, [ f"## v{version} ({today_date_str})", *( [""] if not license_name else [ "", f"_This release is published under the {license_name} License._", "", ] ), "- Initial Release", "", ], ) actual_content = generate_release_notes( hvcs_client=hvcs, release=release, template_dir=Path(""), history=single_release_history, style="conventional", mask_initial_release=True, license_name=license_name, ) assert expected_content == actual_content @pytest.mark.parametrize("license_name", ["", "MIT"]) @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_release_notes_template_first_release_unmasked( example_git_https_url: str, hvcs_client: type[Bitbucket | Gitea | Github | Gitlab], license_name: str, single_release_history: ReleaseHistory, today_date_str: str, ): """ Unit test goal: just make sure it renders the release notes template without error. Scenarios are better suited for all the variations (commit types). 
""" hvcs = hvcs_client(example_git_https_url) version = list(single_release_history.released.keys())[-1] release = single_release_history.released[version] feat_commit_obj = release["elements"]["feature"][0] assert isinstance(feat_commit_obj, ParsedCommit) feat_commit_url = hvcs.commit_hash_url(feat_commit_obj.commit.hexsha) feat_description = str.join("\n", feat_commit_obj.descriptions) expected_content = str.join( os.linesep, [ f"## v{version} ({today_date_str})", *( [""] if not license_name else [ "", f"_This release is published under the {license_name} License._", "", ] ), "### Feature", "", "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{feat_commit_obj.scope}**: " if feat_commit_obj.scope else "" ), commit_desc=feat_description.capitalize(), short_hash=feat_commit_obj.commit.hexsha[:7], url=feat_commit_url, ), "", ], ) actual_content = generate_release_notes( hvcs_client=hvcs, release=release, template_dir=Path(""), history=single_release_history, style="conventional", mask_initial_release=False, license_name=license_name, ) assert expected_content == actual_content def test_release_notes_context_sort_numerically_filter( example_git_https_url: str, single_release_history: ReleaseHistory, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ): version = list(single_release_history.released.keys())[-1] release = single_release_history.released[version] example_project_dir.joinpath(".release_notes.md.j2").write_text( dedent( """\ {{ [ ".. _#5: link", ".. _PR#3: link", ".. _PR#10: link", ".. _#100: link" ] | sort_numerically | join("\\n") }} """ ) ) expected_content = str.join( os.linesep, dedent( """\ .. _#5: link .. _#100: link .. _PR#3: link .. 
_PR#10: link """ ).split("\n"), ) actual_content = generate_release_notes( hvcs_client=Github(remote_url=example_git_https_url), release=release, template_dir=example_project_dir, history=single_release_history, style="conventional", mask_initial_release=False, ) assert expected_content == actual_content def test_release_notes_context_sort_numerically_filter_reversed( example_git_https_url: str, single_release_history: ReleaseHistory, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ): version = list(single_release_history.released.keys())[-1] release = single_release_history.released[version] example_project_dir.joinpath(".release_notes.md.j2").write_text( dedent( """\ {{ [ ".. _#5: link", ".. _PR#3: link", ".. _PR#10: link", ".. _#100: link" ] | sort_numerically(reverse=True) | join("\\n") }} """ ) ) expected_content = str.join( os.linesep, dedent( """\ .. _#100: link .. _#5: link .. _PR#10: link .. _PR#3: link """ ).split("\n"), ) actual_content = generate_release_notes( hvcs_client=Github(remote_url=example_git_https_url), release=release, template_dir=example_project_dir, history=single_release_history, style="conventional", mask_initial_release=False, ) assert expected_content == actual_content def test_release_notes_context_pypi_url_filter( example_git_https_url: str, single_release_history: ReleaseHistory, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ): version = list(single_release_history.released.keys())[-1] release = single_release_history.released[version] example_project_dir.joinpath(".release_notes.md.j2").write_text( """{{ "example-package" | create_pypi_url }}""" ) expected_content = f"https://pypi.org/project/example-package{os.linesep}" actual_content = generate_release_notes( hvcs_client=Github(remote_url=example_git_https_url), release=release, template_dir=example_project_dir, history=single_release_history, style="conventional", mask_initial_release=False, ) assert expected_content == actual_content def 
test_release_notes_context_pypi_url_filter_tagged( example_git_https_url: str, single_release_history: ReleaseHistory, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ): version = list(single_release_history.released.keys())[-1] release = single_release_history.released[version] example_project_dir.joinpath(".release_notes.md.j2").write_text( """{{ "example-package" | create_pypi_url(release.version | string) }}""" ) expected_content = f"https://pypi.org/project/example-package/{version}{os.linesep}" actual_content = generate_release_notes( hvcs_client=Github(remote_url=example_git_https_url), release=release, template_dir=example_project_dir, history=single_release_history, style="conventional", mask_initial_release=False, ) assert expected_content == actual_content @pytest.mark.parametrize("hvcs_client_class", [Github, Gitlab, Gitea]) def test_release_notes_context_release_url_filter( example_git_https_url: str, hvcs_client_class: type[Github | Gitlab | Gitea], single_release_history: ReleaseHistory, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ): version = list(single_release_history.released.keys())[-1] release = single_release_history.released[version] example_project_dir.joinpath(".release_notes.md.j2").write_text( dedent( """\ {{ "[%s](%s)" | format( release.version.as_tag(), release.version.as_tag() | create_release_url, ) }} """ ) ) with mock.patch.dict(os.environ, {}, clear=True): hvcs_client = hvcs_client_class(remote_url=example_git_https_url) expected_content = dedent( f"""\ [{version.as_tag()}]({hvcs_client.create_release_url(version.as_tag())}) """ ) actual_content = generate_release_notes( hvcs_client=hvcs_client, release=release, template_dir=example_project_dir, history=single_release_history, style="conventional", mask_initial_release=False, ) assert expected_content == actual_content @pytest.mark.parametrize("hvcs_client_class", [Github, Gitlab, Gitea, Bitbucket]) def 
test_release_notes_context_format_w_official_name_filter( example_git_https_url: str, hvcs_client_class: type[Github | Gitlab | Gitea], single_release_history: ReleaseHistory, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ): version = list(single_release_history.released.keys())[-1] release = single_release_history.released[version] example_project_dir.joinpath(".release_notes.md.j2").write_text( dedent( """\ {{ "%s" | format_w_official_vcs_name }} {{ "{}" | format_w_official_vcs_name }} {{ "{vcs_name}" | format_w_official_vcs_name }} """ ) ) with mock.patch.dict(os.environ, {}, clear=True): hvcs_client = hvcs_client_class(remote_url=example_git_https_url) expected_content = dedent( f"""\ {hvcs_client.OFFICIAL_NAME} {hvcs_client.OFFICIAL_NAME} {hvcs_client.OFFICIAL_NAME} """ ) actual_content = generate_release_notes( hvcs_client=hvcs_client, release=release, template_dir=example_project_dir, history=single_release_history, style="conventional", mask_initial_release=False, ) assert expected_content == actual_content @pytest.mark.parametrize("mask_initial_release", [True, False]) @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_release_notes_template_w_a_notice( example_git_https_url: str, hvcs_client: type[Github | Gitlab | Gitea | Bitbucket], release_history_w_a_notice: ReleaseHistory, mask_initial_release: bool, today_date_str: str, ): """ Unit test goal: just make sure it renders the release notes template without error. Scenarios are better suited for all the variations (commit types). 
""" released_versions = iter(release_history_w_a_notice.released.keys()) version = next(released_versions) prev_version = next(released_versions) hvcs = hvcs_client(example_git_https_url) release = release_history_w_a_notice.released[version] notice_commit_obj = next(iter(release["elements"].values()))[0] assert isinstance(notice_commit_obj, ParsedCommit) notice_commit_url = hvcs.commit_hash_url(notice_commit_obj.commit.hexsha) notice_commit_description = str.join("\n", notice_commit_obj.descriptions) notice_description = str.join("\n", notice_commit_obj.release_notices) expected_content = str.join( os.linesep, [ f"## v{version} ({today_date_str})", "", "### Refactoring", "", "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{notice_commit_obj.scope}**: " if notice_commit_obj.scope else "" ), commit_desc=notice_commit_description.capitalize().rstrip(), short_hash=notice_commit_obj.commit.hexsha[:7], url=notice_commit_url, ), "", "### Additional Release Information", "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{notice_commit_obj.scope}**: " if notice_commit_obj.scope else "" ), change_desc=notice_description.capitalize().rstrip(), ), "", ], ) if not isinstance(hvcs, Gitea): expected_content += str.join( os.linesep, [ "", "---", "", "**Detailed Changes**: [{prev_version}...{new_version}]({version_compare_url})".format( prev_version=prev_version.as_tag(), new_version=version.as_tag(), version_compare_url=hvcs.compare_url( prev_version.as_tag(), version.as_tag() ), ), "", ], ) actual_content = generate_release_notes( hvcs_client=hvcs_client(remote_url=example_git_https_url), release=release, template_dir=Path(""), history=release_history_w_a_notice, style="conventional", mask_initial_release=mask_initial_release, ) assert expected_content == actual_content @pytest.mark.parametrize("mask_initial_release", [True, False]) @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def 
test_default_release_notes_template_w_a_notice_n_brk_change( example_git_https_url: str, hvcs_client: type[Github | Gitlab | Gitea | Bitbucket], release_history_w_notice_n_brk_change: ReleaseHistory, mask_initial_release: bool, today_date_str: str, ): """ Unit test goal: just make sure it renders the release notes template without error. Scenarios are better suited for all the variations (commit types). """ released_versions = iter(release_history_w_notice_n_brk_change.released.keys()) version = next(released_versions) prev_version = next(released_versions) hvcs = hvcs_client(example_git_https_url) release = release_history_w_notice_n_brk_change.released[version] brk_fix_commit_obj = release["elements"]["Bug Fixes"][0] notice_commit_obj = release["elements"]["Refactoring"][0] assert isinstance(brk_fix_commit_obj, ParsedCommit) assert isinstance(notice_commit_obj, ParsedCommit) brk_fix_commit_url = hvcs.commit_hash_url(brk_fix_commit_obj.commit.hexsha) brk_fix_description = str.join("\n", brk_fix_commit_obj.descriptions) brk_fix_brking_description = str.join( "\n", brk_fix_commit_obj.breaking_descriptions ) notice_commit_url = hvcs.commit_hash_url(notice_commit_obj.commit.hexsha) notice_commit_description = str.join("\n", notice_commit_obj.descriptions) notice_description = str.join("\n", notice_commit_obj.release_notices) expected_content = str.join( os.linesep, [ f"## v{version} ({today_date_str})", "", "### Bug Fixes", "", "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{brk_fix_commit_obj.scope}**: " if brk_fix_commit_obj.scope else "" ), commit_desc=brk_fix_description.capitalize().rstrip(), short_hash=brk_fix_commit_obj.commit.hexsha[:7], url=brk_fix_commit_url, ), "", "### Refactoring", "", "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{notice_commit_obj.scope}**: " if notice_commit_obj.scope else "" ), commit_desc=notice_commit_description.capitalize().rstrip(), 
short_hash=notice_commit_obj.commit.hexsha[:7], url=notice_commit_url, ), "", "### Breaking Changes", "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{brk_fix_commit_obj.scope}**: " if brk_fix_commit_obj.scope else "" ), change_desc=brk_fix_brking_description.capitalize().rstrip(), ), "", "### Additional Release Information", "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{notice_commit_obj.scope}**: " if notice_commit_obj.scope else "" ), change_desc=notice_description.capitalize().rstrip(), ), "", ], ) if not isinstance(hvcs, Gitea): expected_content += str.join( os.linesep, [ "", "---", "", "**Detailed Changes**: [{prev_version}...{new_version}]({version_compare_url})".format( prev_version=prev_version.as_tag(), new_version=version.as_tag(), version_compare_url=hvcs.compare_url( prev_version.as_tag(), version.as_tag() ), ), "", ], ) actual_content = generate_release_notes( hvcs_client=hvcs_client(remote_url=example_git_https_url), release=release, template_dir=Path(""), history=release_history_w_notice_n_brk_change, style="conventional", mask_initial_release=mask_initial_release, ) assert expected_content == actual_content @pytest.mark.parametrize("mask_initial_release", [True, False]) @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_release_notes_template_w_multiple_notices( example_git_https_url: str, hvcs_client: type[Github | Gitlab | Gitea | Bitbucket], release_history_w_multiple_notices: ReleaseHistory, mask_initial_release: bool, today_date_str: str, ): """ Unit test goal: just make sure it renders the release notes template without error. Scenarios are better suited for all the variations (commit types). 
""" released_versions = iter(release_history_w_multiple_notices.released.keys()) version = next(released_versions) prev_version = next(released_versions) hvcs = hvcs_client(example_git_https_url) release = release_history_w_multiple_notices.released[version] feat_commit_obj = release["elements"]["Features"][0] refactor_commit_obj = release["elements"]["Refactoring"][0] assert isinstance(refactor_commit_obj, ParsedCommit) assert isinstance(feat_commit_obj, ParsedCommit) refactor_commit_url = hvcs.commit_hash_url(refactor_commit_obj.commit.hexsha) refactor_commit_desc = str.join("\n", refactor_commit_obj.descriptions) refactor_commit_notice_desc = str.join("\n", refactor_commit_obj.release_notices) feat_commit_url = hvcs.commit_hash_url(feat_commit_obj.commit.hexsha) feat_description = str.join("\n", feat_commit_obj.descriptions) feat_commit_notice_desc = str.join("\n", feat_commit_obj.release_notices) expected_content = str.join( os.linesep, [ f"## v{version} ({today_date_str})", "", "### Features", "", "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{feat_commit_obj.scope}**: " if feat_commit_obj.scope else "" ), commit_desc=feat_description.capitalize().rstrip(), short_hash=feat_commit_obj.commit.hexsha[:7], url=feat_commit_url, ), "", "### Refactoring", "", "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{refactor_commit_obj.scope}**: " if refactor_commit_obj.scope else "" ), commit_desc=refactor_commit_desc.capitalize().rstrip(), short_hash=refactor_commit_obj.commit.hexsha[:7], url=refactor_commit_url, ), "", "### Additional Release Information", "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{refactor_commit_obj.scope}**: " if refactor_commit_obj.scope else "" ), change_desc=refactor_commit_notice_desc.capitalize().rstrip(), ), "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{feat_commit_obj.scope}**: " if feat_commit_obj.scope else "" ), 
change_desc=feat_commit_notice_desc.capitalize().rstrip(), ), "", ], ) if not isinstance(hvcs, Gitea): expected_content += str.join( os.linesep, [ "", "---", "", "**Detailed Changes**: [{prev_version}...{new_version}]({version_compare_url})".format( prev_version=prev_version.as_tag(), new_version=version.as_tag(), version_compare_url=hvcs.compare_url( prev_version.as_tag(), version.as_tag() ), ), "", ], ) actual_content = generate_release_notes( hvcs_client=hvcs_client(remote_url=example_git_https_url), release=release, template_dir=Path(""), history=release_history_w_multiple_notices, style="conventional", mask_initial_release=mask_initial_release, ) assert expected_content == actual_content python-semantic-release-10.4.1/tests/unit/semantic_release/changelog/test_template.py000066400000000000000000000044521506116242600311440ustar00rootroot00000000000000from __future__ import annotations # TODO: This tests for the main options that will help configuring a template, # but not all of them. The testing can be expanded to cover all the options later. # It's not super essential as Jinja2 does most of the testing, we're just checking # that we can properly set the right strings in the template environment. from textwrap import dedent from typing import TYPE_CHECKING import pytest from semantic_release.changelog.template import environment if TYPE_CHECKING: from typing import Any EXAMPLE_TEMPLATE_FORMAT_STR = """

This is an example template document

The title is {variable_start_string} title | upper {variable_end_string}

{comment_start_string}- This text should not appear {comment_end_string} {block_start_string}- for subject in subjects {block_end_string}

This is a paragraph about {variable_start_string} subject {variable_end_string}

{block_start_string}- endfor {block_end_string}""" @pytest.mark.parametrize( "format_map", [ { "block_start_string": "{%", "block_end_string": "%}", "variable_start_string": "{{", "variable_end_string": "}}", "comment_start_string": "{#", "comment_end_string": "#}", }, { "block_start_string": "{[", "block_end_string": "]}", "variable_start_string": "{{", "variable_end_string": "}}", "comment_start_string": "/*", "comment_end_string": "*/", }, ], ) @pytest.mark.parametrize( "subjects", [("dogs", "cats"), ("stocks", "finance", "politics")] ) def test_template_env_configurable(format_map: dict[str, Any], subjects: tuple[str]): template_as_str = EXAMPLE_TEMPLATE_FORMAT_STR.format_map(format_map) env = environment(**format_map) template = env.from_string(template_as_str) title = "important" newline = "\n" expected_result = dedent( f"""

This is an example template document

The title is {title.upper()}

{(newline + " " * 8).join(f'

This is a paragraph about {subject}

' for subject in subjects)}""" # noqa: E501 ) actual_result = template.render(title="important", subjects=subjects) assert expected_result == actual_result python-semantic-release-10.4.1/tests/unit/semantic_release/changelog/test_template_render.py000066400000000000000000000117311506116242600325010ustar00rootroot00000000000000from __future__ import annotations import itertools import os from typing import TYPE_CHECKING import pytest from semantic_release.changelog.template import environment, recursive_render if TYPE_CHECKING: from pathlib import Path from tests.fixtures.example_project import ExProjectDir NORMAL_TEMPLATE_SRC = """--- content: - a string - ["a nested list"] vars: # a comment hello: {{ "world" | upper }} """ NORMAL_TEMPLATE_RENDERED = """--- content: - a string - ["a nested list"] vars: # a comment hello: WORLD """ PLAINTEXT_FILE_CONTENT = """ I should not be rendered as a template. {{ "this string should be untouched" | upper }} """ def _strip_trailing_j2(path: Path) -> Path: if path.name.endswith(".j2"): return path.with_name(path.name[:-3]) return path @pytest.fixture def normal_template(example_project_template_dir: Path) -> Path: template = example_project_template_dir / "normal.yaml.j2" template.parent.mkdir(parents=True, exist_ok=True) template.write_text(NORMAL_TEMPLATE_SRC) return template @pytest.fixture def long_directory_path(example_project_template_dir: Path) -> Path: # NOTE: fixture enables using Path rather than # constant string, so no issue with / vs \ on Windows return example_project_template_dir / "long" / "dir" / "path" @pytest.fixture def deeply_nested_file(long_directory_path: Path) -> Path: file = long_directory_path / "buried.txt" file.parent.mkdir(parents=True, exist_ok=True) file.write_text(PLAINTEXT_FILE_CONTENT) return file @pytest.fixture def hidden_file(example_project_template_dir: Path) -> Path: file = example_project_template_dir / ".hidden" file.parent.mkdir(parents=True, exist_ok=True) file.write_text("I 
shouldn't be present") return file @pytest.fixture def directory_path_with_hidden_subfolder(example_project_template_dir: Path) -> Path: return example_project_template_dir / "path" / ".subfolder" / "hidden" @pytest.fixture def excluded_file(directory_path_with_hidden_subfolder: Path) -> Path: file = directory_path_with_hidden_subfolder / "excluded.txt" file.parent.mkdir(parents=True, exist_ok=True) file.write_text("I shouldn't be present") return file @pytest.mark.usefixtures(excluded_file.__name__) def test_recursive_render( init_example_project: None, example_project_dir: Path, example_project_template_dir: Path, normal_template, deeply_nested_file, hidden_file, ): tmpl_dir = str(example_project_template_dir.resolve()) env = environment(template_dir=tmpl_dir) preexisting_paths = set(example_project_dir.rglob("**/*")) recursive_render( template_dir=example_project_template_dir.resolve(), environment=env, _root_dir=str(example_project_dir.resolve()), ) rendered_normal_template = _strip_trailing_j2( example_project_dir / normal_template.relative_to(example_project_template_dir) ) assert rendered_normal_template.exists() assert rendered_normal_template.read_text() == NORMAL_TEMPLATE_RENDERED rendered_deeply_nested = example_project_dir / deeply_nested_file.relative_to( example_project_template_dir ) assert rendered_deeply_nested.exists() assert rendered_deeply_nested.read_text() == PLAINTEXT_FILE_CONTENT rendered_hidden = example_project_dir / hidden_file.relative_to( example_project_template_dir ) assert not rendered_hidden.exists() assert not (example_project_dir / "path").exists() assert set(example_project_dir.rglob("**/*")) == preexisting_paths.union( example_project_dir / p for t in ( rendered_normal_template, rendered_deeply_nested, ) for p in itertools.accumulate( t.relative_to(example_project_dir).parts, func=lambda *a: os.sep.join(a) ) ) @pytest.fixture def dotfolder_template_dir(example_project_dir: ExProjectDir) -> Path: return example_project_dir / 
".templates/.psr-templates" @pytest.fixture def dotfolder_template( init_example_project: None, dotfolder_template_dir: Path ) -> Path: tmpl = dotfolder_template_dir / "template.txt" tmpl.parent.mkdir(parents=True, exist_ok=True) tmpl.write_text("I am a template") return tmpl def test_recursive_render_with_top_level_dotfolder( init_example_project: None, example_project_dir: ExProjectDir, dotfolder_template: Path, dotfolder_template_dir: Path, ): preexisting_paths = set(example_project_dir.rglob("**/*")) env = environment(template_dir=dotfolder_template_dir.resolve()) recursive_render( template_dir=dotfolder_template_dir.resolve(), environment=env, _root_dir=example_project_dir.resolve(), ) rendered_template = example_project_dir / dotfolder_template.name assert rendered_template.exists() assert set(example_project_dir.rglob("**/*")) == preexisting_paths.union( {example_project_dir / rendered_template} ) python-semantic-release-10.4.1/tests/unit/semantic_release/cli/000077500000000000000000000000001506116242600245335ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/unit/semantic_release/cli/__init__.py000066400000000000000000000000001506116242600266320ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/unit/semantic_release/cli/test_config.py000066400000000000000000000346061506116242600274220ustar00rootroot00000000000000from __future__ import annotations import os import shutil import sys from pathlib import Path, PurePosixPath from re import compile as regexp from typing import TYPE_CHECKING from unittest import mock import pytest import tomlkit from pydantic import RootModel, ValidationError from urllib3.util.url import parse_url import semantic_release from semantic_release.cli.config import ( BranchConfig, ChangelogConfig, ChangelogOutputFormat, GlobalCommandLineOptions, HvcsClient, RawConfig, RuntimeContext, _known_hvcs, ) from semantic_release.cli.util import load_raw_config_file from semantic_release.commit_parser.conventional 
import ConventionalCommitParserOptions from semantic_release.commit_parser.emoji import EmojiParserOptions from semantic_release.commit_parser.scipy import ScipyParserOptions from semantic_release.commit_parser.tag import TagParserOptions from semantic_release.const import DEFAULT_COMMIT_AUTHOR from semantic_release.enums import LevelBump from semantic_release.errors import ParserLoadError from tests.fixtures.repos import repo_w_no_tags_conventional_commits from tests.util import ( CustomParserOpts, CustomParserWithNoOpts, CustomParserWithOpts, IncompleteCustomParser, ) if TYPE_CHECKING: from typing import Any from tests.fixtures.example_project import ExProjectDir, UpdatePyprojectTomlFn from tests.fixtures.git_repo import BuildRepoFn, BuiltRepoResult, CommitConvention @pytest.mark.parametrize( "patched_os_environ, remote_config, expected_token", [ ( {"GH_TOKEN": "mytoken"}, {"type": HvcsClient.GITHUB.value}, "mytoken", ), ( {"GITLAB_TOKEN": "mytoken"}, {"type": HvcsClient.GITLAB.value}, "mytoken", ), ( {"GITEA_TOKEN": "mytoken"}, {"type": HvcsClient.GITEA.value}, "mytoken", ), ( # default not provided -> means Github {"GH_TOKEN": "mytoken"}, {}, "mytoken", ), ( {"CUSTOM_TOKEN": "mytoken"}, {"type": HvcsClient.GITHUB.value, "token": {"env": "CUSTOM_TOKEN"}}, "mytoken", ), ], ) def test_load_hvcs_default_token( patched_os_environ: dict[str, str], remote_config: dict[str, Any], expected_token: str, ): with mock.patch.dict(os.environ, patched_os_environ, clear=True): raw_config = RawConfig.model_validate( { "remote": remote_config, } ) assert expected_token == raw_config.remote.token @pytest.mark.parametrize("remote_config", [{"type": "nonexistent"}]) def test_invalid_hvcs_type(remote_config: dict[str, Any]): with pytest.raises(ValidationError) as excinfo: RawConfig.model_validate( { "remote": remote_config, } ) assert "remote.type" in str(excinfo.value) @pytest.mark.parametrize( "commit_parser, expected_parser_opts", [ ( None, 
RootModel(ConventionalCommitParserOptions()).model_dump(), ), # default not provided -> means conventional ("conventional", RootModel(ConventionalCommitParserOptions()).model_dump()), ("emoji", RootModel(EmojiParserOptions()).model_dump()), ("scipy", RootModel(ScipyParserOptions()).model_dump()), ("tag", RootModel(TagParserOptions()).model_dump()), (f"{CustomParserWithNoOpts.__module__}:{CustomParserWithNoOpts.__name__}", {}), ( f"{CustomParserWithOpts.__module__}:{CustomParserWithOpts.__name__}", RootModel(CustomParserOpts()).model_dump(), ), ], ) def test_load_default_parser_opts( commit_parser: str | None, expected_parser_opts: dict[str, Any] ): raw_config = RawConfig.model_validate( # Since TOML does not support NoneTypes, we need to not include the key {"commit_parser": commit_parser} if commit_parser else {} ) assert expected_parser_opts == raw_config.commit_parser_options def test_load_user_defined_parser_opts(): user_defined_opts = { "allowed_tags": ["foo", "bar", "baz"], "minor_tags": ["bar"], "patch_tags": ["baz"], "default_bump_level": LevelBump.PATCH.value, } raw_config = RawConfig.model_validate( { "commit_parser": "conventional", "commit_parser_options": user_defined_opts, } ) assert user_defined_opts == raw_config.commit_parser_options @pytest.mark.parametrize("commit_parser", [""]) def test_invalid_commit_parser_value(commit_parser: str): with pytest.raises(ValidationError) as excinfo: RawConfig.model_validate( { "commit_parser": commit_parser, } ) assert "commit_parser" in str(excinfo.value) def test_default_toml_config_valid(example_project_dir: ExProjectDir): default_config_file = example_project_dir / "default.toml" default_config_file.write_text( tomlkit.dumps(RawConfig().model_dump(mode="json", exclude_none=True)) ) written = default_config_file.read_text(encoding="utf-8") loaded = tomlkit.loads(written).unwrap() # Check that we can load it correctly parsed = RawConfig.model_validate(loaded) assert parsed # Check the re-loaded internal 
representation is sufficient # There is an issue with BaseModel.__eq__ that means # comparing directly doesn't work with parsed.dict(); this # is because of how tomlkit parsed toml @pytest.mark.parametrize( "mock_env, expected_author", [ ({}, DEFAULT_COMMIT_AUTHOR), ({"GIT_COMMIT_AUTHOR": "foo "}, "foo "), ], ) @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_commit_author_configurable( example_pyproject_toml: Path, mock_env: dict[str, str], expected_author: str, change_to_ex_proj_dir: None, ): content = tomlkit.loads(example_pyproject_toml.read_text(encoding="utf-8")).unwrap() with mock.patch.dict(os.environ, mock_env): raw = RawConfig.model_validate(content) runtime = RuntimeContext.from_raw_config( raw=raw, global_cli_options=GlobalCommandLineOptions(), ) resulting_author = ( f"{runtime.commit_author.name} <{runtime.commit_author.email}>" ) assert expected_author == resulting_author def test_load_valid_runtime_config( build_configured_base_repo: BuildRepoFn, example_project_dir: ExProjectDir, example_pyproject_toml: Path, update_pyproject_toml: UpdatePyprojectTomlFn, change_to_ex_proj_dir: None, ): build_configured_base_repo(example_project_dir) # Wipe out any existing configuration options update_pyproject_toml(f"tool.{semantic_release.__name__}", {}) runtime_ctx = RuntimeContext.from_raw_config( RawConfig.model_validate(load_raw_config_file(example_pyproject_toml)), global_cli_options=GlobalCommandLineOptions(), ) # TODO: add more validation assert runtime_ctx @pytest.mark.parametrize( "commit_parser", [ # Module:Class string f"{CustomParserWithNoOpts.__module__}:{CustomParserWithNoOpts.__name__}", f"{CustomParserWithOpts.__module__}:{CustomParserWithOpts.__name__}", # File path module:Class string f"{CustomParserWithNoOpts.__module__.replace('.', '/')}.py:{CustomParserWithNoOpts.__name__}", f"{CustomParserWithOpts.__module__.replace('.', '/')}.py:{CustomParserWithOpts.__name__}", ], ) def 
test_load_valid_runtime_config_w_custom_parser( commit_parser: CommitConvention, build_configured_base_repo: BuildRepoFn, example_project_dir: ExProjectDir, example_pyproject_toml: Path, change_to_ex_proj_dir: None, request: pytest.FixtureRequest, ): fake_sys_modules = {**sys.modules} if ".py" in commit_parser: module_filepath = Path(commit_parser.split(":")[0]) module_filepath.parent.mkdir(parents=True, exist_ok=True) module_filepath.parent.joinpath("__init__.py").touch() shutil.copy( src=str(request.config.rootpath / module_filepath), dst=str(module_filepath), ) fake_sys_modules.pop( str(Path(module_filepath).with_suffix("")).replace(os.sep, ".") ) build_configured_base_repo( example_project_dir, commit_type=commit_parser, ) with mock.patch.dict(sys.modules, fake_sys_modules, clear=True): assert RuntimeContext.from_raw_config( RawConfig.model_validate(load_raw_config_file(example_pyproject_toml)), global_cli_options=GlobalCommandLineOptions(), ) @pytest.mark.parametrize( "commit_parser", [ # Non-existant module "tests.missing_module:CustomParser", # Non-existant class f"{CustomParserWithOpts.__module__}:MissingCustomParser", # Incomplete class implementation f"{IncompleteCustomParser.__module__}:{IncompleteCustomParser.__name__}", # Non-existant module file "tests/missing_module.py:CustomParser", # Non-existant class in module file f"{CustomParserWithOpts.__module__.replace('.', '/')}.py:MissingCustomParser", # Incomplete class implementation in module file f"{IncompleteCustomParser.__module__.replace('.', '/')}.py:{IncompleteCustomParser.__name__}", ], ) def test_load_invalid_custom_parser( commit_parser: str, build_configured_base_repo: BuildRepoFn, example_project_dir: ExProjectDir, example_pyproject_toml: Path, update_pyproject_toml: UpdatePyprojectTomlFn, pyproject_toml_config_option_parser: str, change_to_ex_proj_dir: None, ): build_configured_base_repo(example_project_dir) # Wipe out any existing configuration options 
update_pyproject_toml(f"{pyproject_toml_config_option_parser}_options", {}) # Insert invalid custom parser string into configuration update_pyproject_toml(pyproject_toml_config_option_parser, commit_parser) with pytest.raises(ParserLoadError): RuntimeContext.from_raw_config( RawConfig.model_validate(load_raw_config_file(example_pyproject_toml)), global_cli_options=GlobalCommandLineOptions(), ) def test_branch_config_with_plain_wildcard(): branch_config = BranchConfig( match="*", ) assert branch_config.match == ".*" @pytest.mark.parametrize( "invalid_regex", [ "*abc", "[a-z", "(.+", "{2,3}", "a{3,2}", ], ) def test_branch_config_with_invalid_regex(invalid_regex: str): with pytest.raises(ValidationError): BranchConfig( match=invalid_regex, ) @pytest.mark.parametrize( "valid_patterns", [ # Single entry [r"chore(?:\([^)]*?\))?: .+"], # Multiple entries [r"^\d+\.\d+\.\d+", r"Initial [Cc]ommit.*"], ], ) def test_changelog_config_with_valid_exclude_commit_patterns(valid_patterns: list[str]): assert ChangelogConfig.model_validate( { "exclude_commit_patterns": valid_patterns, } ) @pytest.mark.parametrize( "invalid_patterns, index_of_invalid_pattern", [ # Single entry, single incorrect (["*abc"], 0), # Two entries, second incorrect ([".*", "[a-z"], 1), # Two entries, first incorrect (["(.+", ".*"], 0), ], ) def test_changelog_config_with_invalid_exclude_commit_patterns( invalid_patterns: list[str], index_of_invalid_pattern: int, ): with pytest.raises( ValidationError, match=regexp( str.join( "", [ r".*\bexclude_commit_patterns\[", str(index_of_invalid_pattern), r"\]: Invalid regular expression", ], ), ), ): ChangelogConfig.model_validate( { "exclude_commit_patterns": invalid_patterns, } ) @pytest.mark.parametrize( "output_format, insertion_flag", [ ( ChangelogOutputFormat.MARKDOWN.value, "", ), ( ChangelogOutputFormat.RESTRUCTURED_TEXT.value, f"..{os.linesep} version list", ), ], ) def test_changelog_config_default_insertion_flag( output_format: str, insertion_flag: str, ): 
changelog_config = ChangelogConfig.model_validate( { "default_templates": { "output_format": output_format, } } ) assert changelog_config.insertion_flag == insertion_flag @pytest.mark.parametrize( "hvcs_type", [k.value for k in _known_hvcs], ) def test_git_remote_url_w_insteadof_alias( repo_w_initial_commit: BuiltRepoResult, example_pyproject_toml: Path, example_git_https_url: str, hvcs_type: str, update_pyproject_toml: UpdatePyprojectTomlFn, ): expected_url = parse_url(example_git_https_url) repo_name_suffix = PurePosixPath(expected_url.path or "").name insteadof_alias = "psr_test_insteadof" insteadof_value = expected_url.url.replace(repo_name_suffix, "") repo = repo_w_initial_commit["repo"] with repo.config_writer() as cfg: # Setup: define the insteadOf replacement value cfg.add_value(f'url "{insteadof_value}"', "insteadof", f"{insteadof_alias}:") # Setup: set the remote URL with an insteadOf alias cfg.set_value('remote "origin"', "url", f"{insteadof_alias}:{repo_name_suffix}") # Setup: set each supported HVCS client type update_pyproject_toml("tool.semantic_release.remote.type", hvcs_type) # Act: load the configuration (in clear environment) with mock.patch.dict(os.environ, {}, clear=True): # Essentially the same as CliContextObj._init_runtime_ctx() project_config = tomlkit.loads( example_pyproject_toml.read_text(encoding="utf-8") ).unwrap() runtime = RuntimeContext.from_raw_config( raw=RawConfig.model_validate( project_config.get("tool", {}).get("semantic_release", {}), ), global_cli_options=GlobalCommandLineOptions(), ) # Trigger a function that calls helpers.parse_git_url() actual_url = runtime.hvcs_client.remote_url(use_token=False) # Evaluate: the remote URL should be the full URL assert expected_url.url == actual_url python-semantic-release-10.4.1/tests/unit/semantic_release/cli/test_github_actions_output.py000066400000000000000000000123711506116242600325720ustar00rootroot00000000000000from __future__ import annotations import os from textwrap import 
dedent from typing import TYPE_CHECKING from unittest import mock import pytest from semantic_release.cli.github_actions_output import VersionGitHubActionsOutput from semantic_release.hvcs.github import Github from semantic_release.version.version import Version from tests.const import EXAMPLE_HVCS_DOMAIN, EXAMPLE_REPO_NAME, EXAMPLE_REPO_OWNER from tests.util import actions_output_to_dict if TYPE_CHECKING: from pathlib import Path BASE_VCS_URL = f"https://{EXAMPLE_HVCS_DOMAIN}/{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}" @pytest.mark.parametrize( "prev_version, version, released, is_prerelease", [ ("1.2.2", "1.2.3", True, False), ("1.2.2", "1.2.3-alpha.1", True, True), ("1.2.2", "1.2.2", False, False), ("1.2.2-alpha.1", "1.2.2-alpha.1", False, True), (None, "1.2.3", True, False), ], ) def test_version_github_actions_output_format( released: bool, version: str, is_prerelease: bool, prev_version: str ): commit_sha = "0" * 40 # 40 zeroes to simulate a SHA-1 hash release_notes = dedent( """\ ## Changes - Added new feature - Fixed bug """ ) expected_output = ( dedent( f"""\ released={'true' if released else 'false'} version={version} tag=v{version} is_prerelease={'true' if is_prerelease else 'false'} link={BASE_VCS_URL}/releases/tag/v{version} previous_version={prev_version or ""} commit_sha={commit_sha} """ ) + f"release_notes< actual) assert expected_output == actual_output_text def test_version_github_actions_output_fails_if_missing_released_param(): output = VersionGitHubActionsOutput( gh_client=Github(f"{BASE_VCS_URL}.git"), version=Version.parse("1.2.3"), ) # Execute with expected failure with pytest.raises(ValueError, match="required outputs were not set"): output.to_output_text() def test_version_github_actions_output_fails_if_missing_commit_sha_param(): output = VersionGitHubActionsOutput( gh_client=Github(f"{BASE_VCS_URL}.git"), released=True, version=Version.parse("1.2.3"), ) # Execute with expected failure with pytest.raises(ValueError, match="required outputs 
were not set"): output.to_output_text() def test_version_github_actions_output_fails_if_missing_release_notes_param(): output = VersionGitHubActionsOutput( gh_client=Github(f"{BASE_VCS_URL}.git"), released=True, version=Version.parse("1.2.3"), ) # Execute with expected failure with pytest.raises(ValueError, match="required outputs were not set"): output.to_output_text() def test_version_github_actions_output_writes_to_github_output_if_available( tmp_path: Path, ): mock_output_file = tmp_path / "action.out" prev_version_str = "1.2.2" version_str = "1.2.3" commit_sha = "0" * 40 # 40 zeroes to simulate a SHA-1 hash release_notes = dedent( """\ ## Changes - Added new feature - Fixed bug """ ) patched_environ = {"GITHUB_OUTPUT": str(mock_output_file.resolve())} with mock.patch.dict(os.environ, patched_environ, clear=True): VersionGitHubActionsOutput( gh_client=Github(f"{BASE_VCS_URL}.git", hvcs_domain=EXAMPLE_HVCS_DOMAIN), version=Version.parse(version_str), released=True, commit_sha=commit_sha, release_notes=release_notes, prev_version=Version.parse(prev_version_str), ).write_if_possible() with open(mock_output_file, encoding="utf-8", newline=os.linesep) as rfd: action_outputs = actions_output_to_dict(rfd.read()) # Evaluate (expected -> actual) assert version_str == action_outputs["version"] assert str(True).lower() == action_outputs["released"] assert str(False).lower() == action_outputs["is_prerelease"] assert f"{BASE_VCS_URL}/releases/tag/v{version_str}" == action_outputs["link"] assert f"v{version_str}" == action_outputs["tag"] assert commit_sha == action_outputs["commit_sha"] assert prev_version_str == action_outputs["previous_version"] assert release_notes == action_outputs["release_notes"] def test_version_github_actions_output_no_error_if_not_in_gha( monkeypatch: pytest.MonkeyPatch, ): output = VersionGitHubActionsOutput( gh_client=Github(f"{BASE_VCS_URL}.git"), version=Version.parse("1.2.3"), released=True, commit_sha="0" * 40, # 40 zeroes to simulate a SHA-1 
hash ) monkeypatch.delenv("GITHUB_OUTPUT", raising=False) output.write_if_possible() python-semantic-release-10.4.1/tests/unit/semantic_release/cli/test_masking_filter.py000066400000000000000000000143311506116242600311440ustar00rootroot00000000000000import io import logging import random import re import string from logging import LogRecord import pytest from semantic_release.cli.masking_filter import MaskingFilter random.seed(0) def _random_string(length: int = 10) -> str: alphabet = ( string.ascii_lowercase + string.ascii_uppercase + string.digits + string.punctuation ) return "".join(random.choice(alphabet) for _ in range(length)) @pytest.fixture def default_masking_filter(): return MaskingFilter() @pytest.fixture def logging_output_stream(): return io.StringIO() @pytest.fixture def logger(logging_output_stream, default_masking_filter): root = logging.getLogger() root.setLevel(logging.DEBUG) root.addHandler(logging.StreamHandler(logging_output_stream)) log = logging.getLogger(__name__) log.setLevel(logging.DEBUG) for h in root.handlers: h.addFilter(default_masking_filter) return log @pytest.mark.parametrize( "unwanted", [f(obj) for f in (repr, str) for obj in ("", None)] ) def test_unwanted_masks_not_applied(default_masking_filter, unwanted): default_masking_filter.add_mask_for(unwanted, "foo") assert default_masking_filter._redact_patterns["foo"] == set() test_str = f"A long string containing the unwanted {unwanted} data" assert default_masking_filter.mask(test_str) == test_str @pytest.mark.parametrize( "masked, secret", [ ("secret-token", "secret-token"), (re.compile(r"ghp_.+?(?=\s|$)"), "ghp_" + _random_string(15)), ], ) @pytest.mark.parametrize("use_named_masks", (True, False)) def test_mask_applied(use_named_masks, masked, secret): masker = MaskingFilter(_use_named_masks=use_named_masks) masker.add_mask_for(masked, "secret") test_str = "Your secret is... 
{secret} preferably hidden" assert masker.mask(test_str.format(secret=secret)) == test_str.format( secret="<'secret' (value removed)>" if use_named_masks else masker.REPLACE_STR ) _secrets = ( "token" + _random_string(), "token" + _random_string(), "secret" + _random_string(), "secret" + _random_string(), ) @pytest.mark.parametrize( "masked, secrets", [ (_secrets, _secrets), ((re.compile(r"token.+?(?=\s|$)"), re.compile(r"secret.+?(?=\s|$)")), _secrets), ], ) def test_multiple_secrets_with_same_mask(masked, secrets): masker = MaskingFilter(_use_named_masks=True) for mask in masked: masker.add_mask_for(mask, "ksam") test_str = " ".join(secrets) assert masker.mask(test_str) == " ".join( "<'ksam' (value removed)>" for _ in secrets ) def test_secrets_exact_replacement(): masker = MaskingFilter(_use_named_masks=True) for secret in _secrets: masker.add_mask_for(secret, "smak") test_str = ", ".join(_secrets) + "!" assert ( masker.mask(test_str) == ", ".join("<'smak' (value removed)>" for _ in _secrets) + "!" 
) @pytest.mark.parametrize( "rec", [ LogRecord( name=__name__, level=logging.INFO, pathname=__file__, lineno=10, args=(_secrets[3],), msg="long message with format %s for secret", exc_info=None, ), LogRecord( name=__name__, level=logging.INFO, pathname=__file__, lineno=10, args={"secret1": _secrets[1], "secret2": _secrets[2]}, msg="another message using %(secret1)s and %(secret2)s", exc_info=None, ), ], ) @pytest.mark.parametrize( "masked", (_secrets, (re.compile(r"(secret|token).+?(?=\s|$)"),)) ) def test_log_record_is_masked_with_simple_args(default_masking_filter, rec, masked): for mask in masked: default_masking_filter.add_mask_for(mask) if isinstance(rec.args, tuple): assert rec.msg % tuple( default_masking_filter.REPLACE_STR for _ in rec.args ) == default_masking_filter.mask(rec.getMessage()) elif isinstance(rec.args, dict): assert rec.msg % { k: default_masking_filter.REPLACE_STR for k in rec.args } == default_masking_filter.mask(rec.getMessage()) @pytest.mark.parametrize( "rec", [ LogRecord( name=__name__, level=logging.INFO, pathname=__file__, lineno=10, args=(_secrets,), msg="long message with format %s for secrets", exc_info=None, ), LogRecord( name=__name__, level=logging.INFO, pathname=__file__, lineno=10, args={"secret1": _secrets[1], "other": _secrets[2:]}, msg="another message using %(secret1)s and %(other)r", exc_info=None, ), ], ) @pytest.mark.parametrize( "masked", (_secrets, (re.compile(r"(secret|token).+?(?=\s|$)"),)) ) def test_log_record_is_masked_with_nontrivial_args(default_masking_filter, rec, masked): for mask in masked: default_masking_filter.add_mask_for(mask) assert any(secret in rec.getMessage() for secret in _secrets) assert all( secret not in default_masking_filter.mask(rec.getMessage()) for secret in _secrets ) @pytest.mark.parametrize( "log_level", ( logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR, logging.CRITICAL, ), ) def test_log_messages_are_masked( default_masking_filter, log_level, logging_output_stream, logger, 
tmp_path ): for secret in _secrets: default_masking_filter.add_mask_for(secret) logger.log(log_level, ", ".join("%s" for _ in _secrets), *_secrets) root = logging.getLogger() for h in (*root.handlers, *logger.handlers): h.flush() written = logging_output_stream.getvalue() assert all(secret not in written for secret in _secrets) @pytest.mark.parametrize("obj", (object(), (), {}, AttributeError("whoopsie"))) def test_non_strings_are_returned(default_masking_filter, obj): rec = LogRecord( name=__name__, level=logging.INFO, pathname=__file__, lineno=10, args=(), msg=obj, exc_info=None, ) assert default_masking_filter.mask(rec.getMessage()) == str(obj) python-semantic-release-10.4.1/tests/unit/semantic_release/cli/test_util.py000066400000000000000000000110421506116242600271170ustar00rootroot00000000000000from __future__ import annotations import json from textwrap import dedent from typing import TYPE_CHECKING import pytest from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.cli.util import load_raw_config_file, parse_toml from semantic_release.errors import InvalidConfiguration if TYPE_CHECKING: from pathlib import Path from typing import Any @pytest.mark.parametrize( "toml_text, expected", [ ( dedent( r""" [not_the_right_key] foo = "bar" """ ), {}, ), ( dedent( r""" [semantic_release] foo = "bar" """ ), {"foo": "bar"}, ), ( dedent( r""" [tool.semantic_release] abc = 123 [tool.semantic_release.foo] def = 456 """ ), {"abc": 123, "foo": {"def": 456}}, ), ], ) def test_parse_toml(toml_text: str, expected: dict[str, Any]): assert parse_toml(toml_text) == expected def test_parse_toml_raises_invalid_configuration_with_invalid_toml(): invalid_toml = dedent( r""" [semantic_release] foo = bar # this is not a valid TOML string """ ) with pytest.raises(InvalidConfiguration): parse_toml(invalid_toml) @pytest.fixture def raw_toml_config_file(tmp_path: Path) -> Path: path = tmp_path / "config.toml" path.write_text( dedent( r""" [semantic_release] 
foo = "bar" [semantic_release.abc] bar = "baz" """ ) ) return path @pytest.fixture def raw_pyproject_toml_config_file(tmp_path: Path, pyproject_toml_file: Path) -> Path: tmp_path.mkdir(exist_ok=True) path = tmp_path / pyproject_toml_file path.write_text( dedent( r""" [tool.semantic_release] foo = "bar" [tool.semantic_release.abc] bar = "baz" """ ) ) return path @pytest.fixture def raw_json_config_file(tmp_path: Path) -> Path: tmp_path.mkdir(exist_ok=True) path = tmp_path / ".releaserc" path.write_text( json.dumps( {"semantic_release": {"foo": "bar", "abc": {"bar": "baz"}}}, indent=4 ) ) return path @pytest.fixture def invalid_toml_config_file(tmp_path: Path) -> Path: path = tmp_path / "config.toml" path.write_text( dedent( r""" [semantic_release] foo = bar # no quotes == invalid [semantic_release.abc] bar = "baz" """ ) ) return path @pytest.fixture def invalid_json_config_file(tmp_path: Path) -> Path: tmp_path.mkdir(exist_ok=True) path = tmp_path / "releaserc.json" path.write_text( dedent( r""" {"semantic_release": {foo: "bar", "abc": {bar: "baz"}}} """ ) ) return path @pytest.fixture def invalid_other_config_file(tmp_path: Path) -> Path: # e.g. 
XML path = tmp_path / "config.xml" path.write_text( dedent( r""" bar baz """ ) ) return path @pytest.mark.parametrize( "raw_config_file, expected", [ ( lazy_fixture(raw_toml_config_file.__name__), {"foo": "bar", "abc": {"bar": "baz"}}, ), ( lazy_fixture(raw_pyproject_toml_config_file.__name__), {"foo": "bar", "abc": {"bar": "baz"}}, ), ( lazy_fixture(raw_json_config_file.__name__), {"foo": "bar", "abc": {"bar": "baz"}}, ), ], ) def test_load_raw_config_file_loads_config( raw_config_file: Path, expected: dict[str, Any] ): assert load_raw_config_file(raw_config_file) == expected @pytest.mark.parametrize( "raw_config_file", [ lazy_fixture(invalid_toml_config_file.__name__), lazy_fixture(invalid_json_config_file.__name__), lazy_fixture(invalid_other_config_file.__name__), ], ) def test_load_raw_invalid_config_file_raises_error(raw_config_file: Path): with pytest.raises(InvalidConfiguration): load_raw_config_file(raw_config_file) python-semantic-release-10.4.1/tests/unit/semantic_release/cli/test_version.py000066400000000000000000000014371506116242600276360ustar00rootroot00000000000000import pytest from semantic_release.cli.commands.version import is_forced_prerelease @pytest.mark.parametrize( "force_prerelease, force_level, prerelease, expected", [ *[ (True, force_level, prerelease, True) for force_level in (None, "major", "minor", "patch") for prerelease in (True, False) ], *[ (False, force_level, prerelease, False) for force_level in ("major", "minor", "patch") for prerelease in (True, False) ], *[(False, None, prerelease, prerelease) for prerelease in (True, False)], ], ) def test_is_forced_prerelease(force_prerelease, force_level, prerelease, expected): assert is_forced_prerelease(force_prerelease, force_level, prerelease) == expected 
python-semantic-release-10.4.1/tests/unit/semantic_release/commit_parser/000077500000000000000000000000001506116242600266305ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/unit/semantic_release/commit_parser/__init__.py000066400000000000000000000000001506116242600307270ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/unit/semantic_release/commit_parser/test_conventional.py000066400000000000000000001322701506116242600327450ustar00rootroot00000000000000# ruff: noqa: SIM300 from __future__ import annotations from textwrap import dedent from typing import TYPE_CHECKING, Iterable, Sequence import pytest from semantic_release.commit_parser.conventional import ( ConventionalCommitParser, ConventionalCommitParserOptions, ) from semantic_release.commit_parser.token import ParsedCommit, ParseError from semantic_release.enums import LevelBump from tests.const import SUPPORTED_ISSUE_CLOSURE_PREFIXES if TYPE_CHECKING: from tests.conftest import MakeCommitObjFn # NOTE: GitLab squash commits are not tested because by default # they don't have any unique attributes of them and they are also # fully customizable. # See https://docs.gitlab.com/ee/user/project/merge_requests/commit_templates.html # It also depends if Fast-Forward merge is enabled because that will # define if there is a merge commit or not and with that likely no # Merge Request Number included unless the user adds it. # TODO: add the recommendation in the PSR documentation is to set your GitLab templates # to mirror GitHub like references in the first subject line. Will Not matter # if fast-forward merge is enabled or not. 
@pytest.mark.parametrize( "commit_message", ["", "feat(parser\n): Add new parser pattern"] ) def test_parser_raises_unknown_message_style( default_conventional_parser: ConventionalCommitParser, make_commit_obj: MakeCommitObjFn, commit_message: str, ): parsed_results = default_conventional_parser.parse(make_commit_obj(commit_message)) assert isinstance(parsed_results, Iterable) for result in parsed_results: assert isinstance(result, ParseError) @pytest.mark.parametrize( "commit_message, expected_commit_details", [ pytest.param( commit_message, expected_commit_details, id=test_id, ) for test_id, commit_message, expected_commit_details in [ ( "Single commit squashed via BitBucket PR resolution", dedent( """\ Merged in feat/my-awesome-stuff (pull request #10) fix(release-config): some commit subject An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author """ ), [ None, { "bump": LevelBump.PATCH, "type": "bug fixes", "scope": "release-config", "descriptions": [ "some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Signed-off-by: author ", ], "linked_issues": ("#12",), "linked_merge_request": "#10", }, ], ), ( "Multiple commits squashed via BitBucket PR resolution", dedent( """\ Merged in feat/my-awesome-stuff (pull request #10) fix(release-config): some commit subject An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author feat: implemented searching gizmos by keyword docs(parser): add new parser pattern fix(cli)!: changed option name BREAKING CHANGE: A breaking change description Closes: #555 invalid non-conventional formatted commit """ ), [ None, { "bump": LevelBump.PATCH, "type": "bug fixes", "scope": "release-config", "descriptions": [ "some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Signed-off-by: author ", 
], "linked_issues": ("#12",), "linked_merge_request": "#10", }, { "bump": LevelBump.MINOR, "type": "features", "descriptions": ["implemented searching gizmos by keyword"], "linked_merge_request": "#10", }, { "bump": LevelBump.NO_RELEASE, "type": "documentation", "scope": "parser", "descriptions": [ "add new parser pattern", ], "linked_merge_request": "#10", }, { "bump": LevelBump.MAJOR, "type": "bug fixes", "scope": "cli", "descriptions": [ "changed option name", # This is a bit unusual but its because there is no identifier that will # identify this as a separate commit so it gets included in the previous commit "invalid non-conventional formatted commit", ], "breaking_descriptions": [ "A breaking change description", ], "linked_issues": ("#555",), "linked_merge_request": "#10", }, ], ), ] ], ) def test_parser_squashed_commit_bitbucket_squash_style( default_conventional_parser: ConventionalCommitParser, make_commit_obj: MakeCommitObjFn, commit_message: str, expected_commit_details: Sequence[dict | None], ): # Setup: Enable squash commit parsing parser = ConventionalCommitParser( options=ConventionalCommitParserOptions( **{ **default_conventional_parser.options.__dict__, "parse_squash_commits": True, } ) ) # Build the commit object and parse it the_commit = make_commit_obj(commit_message) parsed_results = parser.parse(the_commit) # Validate the results assert isinstance(parsed_results, Iterable) assert ( len(expected_commit_details) == len(parsed_results) ), f"Expected {len(expected_commit_details)} parsed results, but got {len(parsed_results)}" for result, expected in zip(parsed_results, expected_commit_details): if expected is None: assert isinstance(result, ParseError) continue assert isinstance(result, ParsedCommit) # Required assert expected["bump"] == result.bump assert expected["type"] == result.type # Optional assert expected.get("scope", "") == result.scope # TODO: v11 change to tuples assert expected.get("descriptions", []) == result.descriptions assert 
expected.get("breaking_descriptions", []) == result.breaking_descriptions assert expected.get("linked_issues", ()) == result.linked_issues assert expected.get("linked_merge_request", "") == result.linked_merge_request @pytest.mark.parametrize( "commit_message, expected_commit_details", [ pytest.param( commit_message, expected_commit_details, id=test_id, ) for test_id, commit_message, expected_commit_details in [ ( "Single commit squashed via manual Git squash merge", dedent( """\ Squashed commit of the following: commit 63ec09b9e844e616dcaa7bae35a0b66671b59fbb Author: author Date: Sun Jan 19 12:05:23 2025 +0000 fix(release-config): some commit subject An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author """ ), [ { "bump": LevelBump.PATCH, "type": "bug fixes", "scope": "release-config", "descriptions": [ "some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Signed-off-by: author ", ], "linked_issues": ("#12",), } ], ), ( "Multiple commits squashed via manual Git squash merge", dedent( """\ Squashed commit of the following: commit 63ec09b9e844e616dcaa7bae35a0b66671b59fbb Author: author Date: Sun Jan 19 12:05:23 2025 +0000 fix(release-config): some commit subject An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author commit 1f34769bf8352131ad6f4879b8c47becf3c7aa69 Author: author Date: Sat Jan 18 10:13:53 2025 +0000 feat: implemented searching gizmos by keyword commit b2334a64a11ef745a17a2a4034f651e08e8c45a6 Author: author Date: Sat Jan 18 10:13:53 2025 +0000 docs(parser): add new parser pattern commit 5f0292fb5a88c3a46e4a02bec35b85f5228e8e51 Author: author Date: Sat Jan 18 10:13:53 2025 +0000 fix(cli)!: changed option name BREAKING CHANGE: A breaking change description Closes: #555 commit 2f314e7924be161cfbf220d3b6e2a6189a3b5609 Author: author Date: Sat Jan 18 10:13:53 2025 
+0000 invalid non-conventional formatted commit """ ), [ { "bump": LevelBump.PATCH, "type": "bug fixes", "scope": "release-config", "descriptions": [ "some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Signed-off-by: author ", ], "linked_issues": ("#12",), }, { "bump": LevelBump.MINOR, "type": "features", "descriptions": ["implemented searching gizmos by keyword"], }, { "bump": LevelBump.NO_RELEASE, "type": "documentation", "scope": "parser", "descriptions": [ "add new parser pattern", ], }, { "bump": LevelBump.MAJOR, "type": "bug fixes", "scope": "cli", "descriptions": [ "changed option name", ], "breaking_descriptions": [ "A breaking change description", ], "linked_issues": ("#555",), }, None, ], ), ] ], ) def test_parser_squashed_commit_git_squash_style( default_conventional_parser: ConventionalCommitParser, make_commit_obj: MakeCommitObjFn, commit_message: str, expected_commit_details: Sequence[dict | None], ): # Setup: Enable squash commit parsing parser = ConventionalCommitParser( options=ConventionalCommitParserOptions( **{ **default_conventional_parser.options.__dict__, "parse_squash_commits": True, } ) ) # Build the commit object and parse it the_commit = make_commit_obj(commit_message) parsed_results = parser.parse(the_commit) # Validate the results assert isinstance(parsed_results, Iterable) assert ( len(expected_commit_details) == len(parsed_results) ), f"Expected {len(expected_commit_details)} parsed results, but got {len(parsed_results)}" for result, expected in zip(parsed_results, expected_commit_details): if expected is None: assert isinstance(result, ParseError) continue assert isinstance(result, ParsedCommit) # Required assert expected["bump"] == result.bump assert expected["type"] == result.type # Optional assert expected.get("scope", "") == result.scope # TODO: v11 change to tuples assert expected.get("descriptions", []) == result.descriptions assert 
expected.get("breaking_descriptions", []) == result.breaking_descriptions assert expected.get("linked_issues", ()) == result.linked_issues assert expected.get("linked_merge_request", "") == result.linked_merge_request @pytest.mark.parametrize( "commit_message, expected_commit_details", [ pytest.param( commit_message, expected_commit_details, id=test_id, ) for test_id, commit_message, expected_commit_details in [ ( "Single commit squashed via GitHub PR resolution", dedent( """\ fix(release-config): some commit subject (#10) An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author """ ), [ { "bump": LevelBump.PATCH, "type": "bug fixes", "scope": "release-config", "descriptions": [ "some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Signed-off-by: author ", ], "linked_issues": ("#12",), "linked_merge_request": "#10", }, ], ), ( "Multiple commits squashed via GitHub PR resolution", dedent( """\ fix(release-config): some commit subject (#10) An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author * feat: implemented searching gizmos by keyword * docs(parser): add new parser pattern * fix(cli)!: changed option name BREAKING CHANGE: A breaking change description Closes: #555 * invalid non-conventional formatted commit """ ), [ { "bump": LevelBump.PATCH, "type": "bug fixes", "scope": "release-config", "descriptions": [ "some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Signed-off-by: author ", ], "linked_issues": ("#12",), "linked_merge_request": "#10", }, { "bump": LevelBump.MINOR, "type": "features", "descriptions": ["implemented searching gizmos by keyword"], "linked_merge_request": "#10", }, { "bump": LevelBump.NO_RELEASE, "type": "documentation", "scope": "parser", "descriptions": [ "add new parser pattern", ], 
"linked_merge_request": "#10", }, { "bump": LevelBump.MAJOR, "type": "bug fixes", "scope": "cli", "descriptions": [ "changed option name", # This is a bit unusual but its because there is no identifier that will # identify this as a separate commit so it gets included in the previous commit "* invalid non-conventional formatted commit", ], "breaking_descriptions": [ "A breaking change description", ], "linked_issues": ("#555",), "linked_merge_request": "#10", }, ], ), ] ], ) def test_parser_squashed_commit_github_squash_style( default_conventional_parser: ConventionalCommitParser, make_commit_obj: MakeCommitObjFn, commit_message: str, expected_commit_details: Sequence[dict | None], ): # Setup: Enable squash commit parsing parser = ConventionalCommitParser( options=ConventionalCommitParserOptions( **{ **default_conventional_parser.options.__dict__, "parse_squash_commits": True, } ) ) # Build the commit object and parse it the_commit = make_commit_obj(commit_message) parsed_results = parser.parse(the_commit) # Validate the results assert isinstance(parsed_results, Iterable) assert ( len(expected_commit_details) == len(parsed_results) ), f"Expected {len(expected_commit_details)} parsed results, but got {len(parsed_results)}" for result, expected in zip(parsed_results, expected_commit_details): if expected is None: assert isinstance(result, ParseError) continue assert isinstance(result, ParsedCommit) # Required assert expected["bump"] == result.bump assert expected["type"] == result.type # Optional assert expected.get("scope", "") == result.scope # TODO: v11 change to tuples assert expected.get("descriptions", []) == result.descriptions assert expected.get("breaking_descriptions", []) == result.breaking_descriptions assert expected.get("linked_issues", ()) == result.linked_issues assert expected.get("linked_merge_request", "") == result.linked_merge_request @pytest.mark.parametrize( "commit_message, bump", [ ( "feat(parsers): add new parser pattern\n\nBREAKING CHANGE: 
change", LevelBump.MAJOR, ), ("feat(parsers)!: add new parser pattern", LevelBump.MAJOR), ( "feat(parsers): add new parser pattern\n\nNew pattern is awesome\n\n" "BREAKING CHANGE: change \n", LevelBump.MAJOR, ), ( "feat(parsers): add new parser pattern\n\nBREAKING-CHANGE: change !", LevelBump.MAJOR, ), ("feat(parser): add emoji parser", LevelBump.MINOR), ("fix(parser): fix regex in conventional parser", LevelBump.PATCH), ("test(parser): add a test for conventional parser", LevelBump.NO_RELEASE), ("feat(parser)!: edit data parsing stuff", LevelBump.MAJOR), ("fix!: edit data parsing stuff again", LevelBump.MAJOR), ("fix: superfix", LevelBump.PATCH), ], ) def test_parser_returns_correct_bump_level( default_conventional_parser: ConventionalCommitParser, commit_message: str, bump: LevelBump, make_commit_obj: MakeCommitObjFn, ): parsed_results = default_conventional_parser.parse(make_commit_obj(commit_message)) assert isinstance(parsed_results, Iterable) assert len(parsed_results) == 1 result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert result.bump is bump @pytest.mark.parametrize( "message, type_", [ ("feat(parser): ...", "features"), ("fix(parser): ...", "bug fixes"), ("test(parser): ...", "testing"), ("docs(parser): ...", "documentation"), ("style(parser): ...", "code style"), ("refactor(parser): ...", "refactoring"), ("chore(parser): ...", "chores"), ], ) def test_parser_return_type_from_commit_message( default_conventional_parser: ConventionalCommitParser, message: str, type_: str, make_commit_obj: MakeCommitObjFn, ): parsed_results = default_conventional_parser.parse(make_commit_obj(message)) assert isinstance(parsed_results, Iterable) assert len(parsed_results) == 1 result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert result.type == type_ @pytest.mark.parametrize( "message, scope", [ ("chore(parser): ...", "parser"), ("chore(a part): ...", "a part"), ("chore(a_part): ...", "a_part"), ("chore(a-part): 
...", "a-part"), ("chore(a.part): ...", "a.part"), ("chore(a+part): ...", "a+part"), ("chore(a&part): ...", "a&part"), ("chore((part)): ...", "(part)"), ("chore((p):rt): ...", "(p):rt"), ], ) def test_parser_return_scope_from_commit_message( default_conventional_parser: ConventionalCommitParser, message: str, scope: str, make_commit_obj: MakeCommitObjFn, ): parsed_results = default_conventional_parser.parse(make_commit_obj(message)) assert isinstance(parsed_results, Iterable) assert len(parsed_results) == 1 result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert result.scope == scope _long_text = ( "This is an long explanatory part of a commit message. It should give " "some insight to the fix this commit adds to the codebase." ) _footer = "Closes: #400" @pytest.mark.parametrize( "message, descriptions", [ ("feat(parser): add emoji parser", ["add emoji parser"]), ( "fix(parser): fix regex in conventional parser", ["fix regex in conventional parser"], ), ( "test(parser): add a test for conventional parser", ["add a test for conventional parser"], ), ( f"fix(tox): fix env \n\n{_long_text}\n\n{_footer}", ["fix env ", _long_text], ), ("fix: superfix", ["superfix"]), ], ) def test_parser_return_subject_from_commit_message( default_conventional_parser: ConventionalCommitParser, message: str, descriptions: list[str], make_commit_obj: MakeCommitObjFn, ): parsed_results = default_conventional_parser.parse(make_commit_obj(message)) assert isinstance(parsed_results, Iterable) assert len(parsed_results) == 1 result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert result.descriptions == descriptions @pytest.mark.parametrize( "message, subject, merge_request_number", [ # GitHub, Gitea style ( "feat(parser): add emoji parser (#123)", "add emoji parser", "#123", ), # GitLab style ( "fix(parser): fix regex in conventional parser (!456)", "fix regex in conventional parser", "!456", ), # BitBucket style ( "feat(parser): add emoji 
parser (pull request #123)", "add emoji parser", "#123", ), # Both a linked merge request and an issue footer (should return the linked merge request) ("fix: superfix (#123)\n\nCloses: #400", "superfix", "#123"), # None ("fix: superfix", "superfix", ""), # None but includes an issue footer it should not be considered a linked merge request ("fix: superfix\n\nCloses: #400", "superfix", ""), ], ) def test_parser_return_linked_merge_request_from_commit_message( default_conventional_parser: ConventionalCommitParser, message: str, subject: str, merge_request_number: str, make_commit_obj: MakeCommitObjFn, ): parsed_results = default_conventional_parser.parse(make_commit_obj(message)) assert isinstance(parsed_results, Iterable) assert len(parsed_results) == 1 result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert merge_request_number == result.linked_merge_request assert subject == result.descriptions[0] @pytest.mark.parametrize( "message, linked_issues", [ *[ # GitHub, Gitea, GitLab style ( f"feat(parser): add magic parser\n\n{footer}", linked_issues, ) for footer_prefix in SUPPORTED_ISSUE_CLOSURE_PREFIXES for footer, linked_issues in [ # Single issue ( f"{footer_prefix.capitalize()}: #555", ["#555"], ), # Git Footer style (capitalized) (f"{footer_prefix.lower()}: #555", ["#555"]), # lowercase prefix (f"{footer_prefix.upper()}: #555", ["#555"]), # uppercase prefix # Mulitple issues (variant 1: list with one prefix, not supported by GitHub) ( f"{footer_prefix}: #555,#444", ["#444", "#555"], ), # Comma separated (w/o space) ( f"{footer_prefix}: #555, #444", ["#444", "#555"], ), # Comma separated (w/ space) ( f"{footer_prefix}: #555 , #444", ["#444", "#555"], ), # Comma separated (w/ extra space) (f"{footer_prefix}: #555 #444", ["#444", "#555"]), # Space separated ( f"{footer_prefix}: #555;#444", ["#444", "#555"], ), # semicolon separated (w/o space) ( f"{footer_prefix}: #555; #444", ["#444", "#555"], ), # semicolon separated (w/ space) ( 
f"{footer_prefix}: #555 ; #444", ["#444", "#555"], ), # semicolon separated (w/ extra space) ( f"{footer_prefix}: #555/#444", ["#444", "#555"], ), # slash separated (w/o space) ( f"{footer_prefix}: #555/ #444", ["#444", "#555"], ), # slash separated (w/ space) ( f"{footer_prefix}: #555 / #444", ["#444", "#555"], ), # slash separated (w/ extra space) ( f"{footer_prefix}: #555Ƽ", ["#444", "#555"], ), # ampersand separated (w/o space) ( f"{footer_prefix}: #555& #444", ["#444", "#555"], ), # ampersand separated (w/ space) ( f"{footer_prefix}: #555 & #444", ["#444", "#555"], ), # ampersand separated (w/ extra space) (f"{footer_prefix}: #555 and #444", ["#444", "#555"]), # and separated ( f"{footer_prefix}: #555, #444, and #333", ["#333", "#444", "#555"], ), # and separated # Mulitple issues (variant 2: multiple footers, supported by GitHub) (f"{footer_prefix}: #555\n{footer_prefix}: #444", ["#444", "#555"]), # More than 2 issues ( f"{footer_prefix}: #555, #444, #333", ["#333", "#444", "#555"], ), # More than 2 issues (force numerical sort) ( f"{footer_prefix}: #555, #3333, #444", ["#444", "#555", "#3333"], ), # Single issue listed multiple times (f"{footer_prefix}: #555, #555", ["#555"]), # Multiple footers with the same issue (f"{footer_prefix}: #555\n{footer_prefix}: #555", ["#555"]), # Multiple issues via multiple inline git footers (f"{footer_prefix}: #555, {footer_prefix}: #444", ["#444", "#555"]), # Multiple valid footers ( str.join( "\n", [ f"{footer_prefix}: #555", "Signed-off-by: johndoe ", f"{footer_prefix}: #444", ], ), ["#444", "#555"], ), # ----------------------------------------- Invalid Sets ----------------------------------------- # # Must have colon because it is a git footer, these will not return a linked issue (f"{footer_prefix} #666", []), (f"{footer_prefix} #666, #777", []), # Invalid Multiple issues (although it is supported by GitHub, it is not supported by the parser) (f"{footer_prefix} #666, {footer_prefix} #777", []), # Invalid 'and' 
separation (f"{footer_prefix}: #666and#777", ["#666and#777"]), # Invalid prefix ("ref: #666", []), # body mentions an issue and has a different git footer ( "In #666, the devils in the details...\n\nSigned-off-by: johndoe ", [], ), ] ], *[ # JIRA style ( f"feat(parser): add magic parser\n\n{footer}", linked_issues, ) for footer_prefix in SUPPORTED_ISSUE_CLOSURE_PREFIXES for footer, linked_issues in [ # Single issue ( f"{footer_prefix.capitalize()}: ABC-555", ["ABC-555"], ), # Git Footer style (capitalized) (f"{footer_prefix.lower()}: ABC-555", ["ABC-555"]), # lowercase prefix (f"{footer_prefix.upper()}: ABC-555", ["ABC-555"]), # uppercase prefix # Mulitple issues (variant 1: list with one prefix, not supported by GitHub) ( f"{footer_prefix}: ABC-555,ABC-444", ["ABC-444", "ABC-555"], ), # Comma separated (w/o space) ( f"{footer_prefix}: ABC-555, ABC-444", ["ABC-444", "ABC-555"], ), # Comma separated (w/ space) ( f"{footer_prefix}: ABC-555 , ABC-444", ["ABC-444", "ABC-555"], ), # Comma separated (w/ extra space) ( f"{footer_prefix}: ABC-555 ABC-444", ["ABC-444", "ABC-555"], ), # Space separated ( f"{footer_prefix}: ABC-555;ABC-444", ["ABC-444", "ABC-555"], ), # semicolon separated (w/o space) ( f"{footer_prefix}: ABC-555; ABC-444", ["ABC-444", "ABC-555"], ), # semicolon separated (w/ space) ( f"{footer_prefix}: ABC-555 ; ABC-444", ["ABC-444", "ABC-555"], ), # semicolon separated (w/ extra space) ( f"{footer_prefix}: ABC-555/ABC-444", ["ABC-444", "ABC-555"], ), # slash separated (w/o space) ( f"{footer_prefix}: ABC-555/ ABC-444", ["ABC-444", "ABC-555"], ), # slash separated (w/ space) ( f"{footer_prefix}: ABC-555 / ABC-444", ["ABC-444", "ABC-555"], ), # slash separated (w/ extra space) ( f"{footer_prefix}: ABC-555&ABC-444", ["ABC-444", "ABC-555"], ), # ampersand separated (w/o space) ( f"{footer_prefix}: ABC-555& ABC-444", ["ABC-444", "ABC-555"], ), # ampersand separated (w/ space) ( f"{footer_prefix}: ABC-555 & ABC-444", ["ABC-444", "ABC-555"], ), # ampersand 
separated (w/ extra space) ( f"{footer_prefix}: ABC-555 and ABC-444", ["ABC-444", "ABC-555"], ), # and separated ( f"{footer_prefix}: ABC-555, ABC-444, and ABC-333", ["ABC-333", "ABC-444", "ABC-555"], ), # and separated # Mulitple issues (variant 2: multiple footers, supported by GitHub) ( f"{footer_prefix}: ABC-555\n{footer_prefix}: ABC-444", ["ABC-444", "ABC-555"], ), # More than 2 issues ( f"{footer_prefix}: ABC-555, ABC-444, ABC-333", ["ABC-333", "ABC-444", "ABC-555"], ), # More than 2 issues (force numerical sort) ( f"{footer_prefix}: ABC-555, ABC-3333, ABC-444", ["ABC-444", "ABC-555", "ABC-3333"], ), # Single issue listed multiple times (f"{footer_prefix}: ABC-555, ABC-555", ["ABC-555"]), # Multiple footers with the same issue (f"{footer_prefix}: ABC-555\n{footer_prefix}: ABC-555", ["ABC-555"]), # Multiple issues via multiple inline git footers ( f"{footer_prefix}: ABC-666, {footer_prefix}: ABC-777", ["ABC-666", "ABC-777"], ), # Multiple valid footers ( str.join( "\n", [ f"{footer_prefix}: ABC-555", "Signed-off-by: johndoe ", f"{footer_prefix}: ABC-444", ], ), ["ABC-444", "ABC-555"], ), # ----------------------------------------- Invalid Sets ----------------------------------------- # # Must have colon because it is a git footer, these will not return a linked issue (f"{footer_prefix} ABC-666", []), (f"{footer_prefix} ABC-666, ABC-777", []), # Invalid Multiple issues (although it is supported by GitHub, it is not supported by the parser) (f"{footer_prefix} ABC-666, {footer_prefix} ABC-777", []), # Invalid 'and' separation (f"{footer_prefix}: ABC-666andABC-777", ["ABC-666andABC-777"]), # Invalid prefix ("ref: ABC-666", []), # body mentions an issue and has a different git footer ( "In ABC-666, the devils in the details...\n\nSigned-off-by: johndoe ", [], ), ] ], *[ ( f"feat(parser): add magic parser\n\n{footer}", linked_issues, ) for footer, linked_issues in [ # Multiple footers with the same issue but different prefixes ("Resolves: #555\nfix: #444", ["#444", 
"#555"]), # Whitespace padded footer (" Resolves: #555\n", ["#555"]), ] ], ( # Only grabs the issue reference when there is a GitHub PR reference in the subject "feat(parser): add magic parser (#123)\n\nCloses: #555", ["#555"], ), # Does not grab an issue when there is only a GitHub PR reference in the subject ("feat(parser): add magic parser (#123)", []), # Does not grab an issue when there is only a Bitbucket PR reference in the subject ("feat(parser): add magic parser (pull request #123)", []), ], ) def test_parser_return_linked_issues_from_commit_message( default_conventional_parser: ConventionalCommitParser, message: str, linked_issues: Sequence[str], make_commit_obj: MakeCommitObjFn, ): parsed_results = default_conventional_parser.parse(make_commit_obj(message)) assert isinstance(parsed_results, Iterable) assert 1 == len(parsed_results) result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert tuple(linked_issues) == result.linked_issues @pytest.mark.parametrize( "message, notices", [ pytest.param( message, notices, id=test_id, ) for test_id, message, notices in [ ( "single notice", dedent( """\ fix(parser): fix regex in conventional parser NOTICE: This is a notice """ ), ["This is a notice"], ), ( "multiline notice", dedent( """\ fix(parser): fix regex in conventional parser NOTICE: This is a notice that is longer than other notices """ ), ["This is a notice that is longer than other notices"], ), ( "multiple notices", dedent( """\ fix(parser): fix regex in conventional parser NOTICE: This is a notice NOTICE: This is a second notice """ ), ["This is a notice", "This is a second notice"], ), ( "notice with other footer", dedent( """\ fix(parser): fix regex in conventional parser BREAKING CHANGE: This is a breaking change NOTICE: This is a notice """ ), ["This is a notice"], ), ] ], ) def test_parser_return_release_notices_from_commit_message( default_conventional_parser: ConventionalCommitParser, message: str, notices: Sequence[str], 
make_commit_obj: MakeCommitObjFn, ): parsed_results = default_conventional_parser.parse(make_commit_obj(message)) assert isinstance(parsed_results, Iterable) assert len(parsed_results) == 1 result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert tuple(notices) == result.release_notices full_description = str.join("\n\n", result.descriptions) full_notice = str.join("\n\n", result.release_notices) assert full_notice not in full_description ############################## # test custom parser options # ############################## def test_parser_custom_default_level(make_commit_obj: MakeCommitObjFn): options = ConventionalCommitParserOptions(default_bump_level=LevelBump.MINOR) parsed_results = ConventionalCommitParser(options).parse( make_commit_obj("test(parser): add a test for conventional parser") ) assert isinstance(parsed_results, Iterable) result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert result.bump is LevelBump.MINOR def test_parser_custom_allowed_types( default_conventional_parser: ConventionalCommitParser, make_commit_obj: MakeCommitObjFn, ): new_tag = "custom" custom_allowed_tags = [*default_conventional_parser.options.allowed_tags, new_tag] parser = ConventionalCommitParser( options=ConventionalCommitParserOptions( allowed_tags=tuple(custom_allowed_tags), ) ) for commit_type, commit_msg in [ (new_tag, f"{new_tag}: ..."), # no scope (new_tag, f"{new_tag}(parser): ..."), # with scope ("chores", "chore(parser): ..."), # existing, non-release tag ]: parsed_results = parser.parse(make_commit_obj(commit_msg)) assert isinstance(parsed_results, Iterable) result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert result.type == commit_type assert result.bump is LevelBump.NO_RELEASE def test_parser_custom_allowed_types_ignores_non_types( default_conventional_parser: ConventionalCommitParser, make_commit_obj: MakeCommitObjFn, ): banned_tag = "feat" custom_allowed_tags = 
[*default_conventional_parser.options.allowed_tags] custom_allowed_tags.remove(banned_tag) parser = ConventionalCommitParser( options=ConventionalCommitParserOptions( allowed_tags=tuple(custom_allowed_tags), ) ) parsed_results = parser.parse(make_commit_obj(f"{banned_tag}(parser): ...")) assert isinstance(parsed_results, Iterable) result = next(iter(parsed_results)) assert isinstance(result, ParseError) def test_parser_custom_minor_tags(make_commit_obj: MakeCommitObjFn): custom_minor_tag = "docs" parser = ConventionalCommitParser( options=ConventionalCommitParserOptions(minor_tags=(custom_minor_tag,)) ) parsed_results = parser.parse(make_commit_obj(f"{custom_minor_tag}: ...")) assert isinstance(parsed_results, Iterable) result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert result.bump is LevelBump.MINOR def test_parser_custom_patch_tags(make_commit_obj: MakeCommitObjFn): custom_patch_tag = "test" parser = ConventionalCommitParser( options=ConventionalCommitParserOptions(patch_tags=(custom_patch_tag,)) ) parsed_results = parser.parse(make_commit_obj(f"{custom_patch_tag}: ...")) assert isinstance(parsed_results, Iterable) result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert result.bump is LevelBump.PATCH def test_parser_ignore_merge_commit( default_conventional_parser: ConventionalCommitParser, make_commit_obj: MakeCommitObjFn, ): # Setup: Enable parsing of linked issues parser = ConventionalCommitParser( options=ConventionalCommitParserOptions( **{ **default_conventional_parser.options.__dict__, "ignore_merge_commits": True, } ) ) base_commit = make_commit_obj("Merge branch 'fix/fix-feature' into 'main'") incomming_commit = make_commit_obj("feat: add a new feature") # Setup: Create a merge commit merge_commit = make_commit_obj("Merge branch 'feat/add-new-feature' into 'main'") merge_commit.parents = [base_commit, incomming_commit] # Action parsed_result = parser.parse(merge_commit) assert 
isinstance(parsed_result, ParseError) assert "Ignoring merge commit" in parsed_result.error python-semantic-release-10.4.1/tests/unit/semantic_release/commit_parser/test_emoji.py000066400000000000000000001177251506116242600313610ustar00rootroot00000000000000from __future__ import annotations from textwrap import dedent from typing import TYPE_CHECKING, Iterable, Sequence import pytest from semantic_release.commit_parser.emoji import EmojiCommitParser, EmojiParserOptions from semantic_release.commit_parser.token import ParsedCommit, ParseError from semantic_release.enums import LevelBump from tests.const import SUPPORTED_ISSUE_CLOSURE_PREFIXES if TYPE_CHECKING: from tests.conftest import MakeCommitObjFn @pytest.mark.parametrize( "commit_message, bump, type_, descriptions, breaking_descriptions", [ # Major bump ( ":boom: Breaking changes\n\nMore description\n\nEven more description", LevelBump.MAJOR, ":boom:", [":boom: Breaking changes"], ["More description", "Even more description"], ), # Minor bump ( ":sparkles: Add a new feature\n\nSome description of the feature", LevelBump.MINOR, ":sparkles:", [":sparkles: Add a new feature", "Some description of the feature"], [], ), # Patch bump ( ":bug: Fixing a bug\n\nThe bug is finally gone!", LevelBump.PATCH, ":bug:", [":bug: Fixing a bug", "The bug is finally gone!"], [], ), # No release ( ":pencil: Documentation changes", LevelBump.NO_RELEASE, "Other", [":pencil: Documentation changes"], [], ), # Multiple emojis ( ":sparkles::pencil: Add a feature and document it", LevelBump.MINOR, ":sparkles:", [":sparkles::pencil: Add a feature and document it"], [], ), # Emoji in description ( ":sparkles: Add a new feature\n\n:boom: should not be detected", LevelBump.MINOR, ":sparkles:", [":sparkles: Add a new feature"], [], ), ], ) def test_default_emoji_parser( default_emoji_parser: EmojiCommitParser, commit_message: str, bump: LevelBump, type_: str, descriptions: list[str], breaking_descriptions: list[str], make_commit_obj: 
MakeCommitObjFn, ): commit = make_commit_obj(commit_message) parsed_results = default_emoji_parser.parse(commit) assert isinstance(parsed_results, Iterable) result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert bump is result.bump assert type_ == result.type assert descriptions == result.descriptions assert breaking_descriptions == result.breaking_descriptions @pytest.mark.parametrize( "message, subject, merge_request_number", [ # GitHub, Gitea style ( ":sparkles: add new feature (#123)", ":sparkles: add new feature", "#123", ), # GitLab style ( ":bug: fix regex in parser (!456)", ":bug: fix regex in parser", "!456", ), # BitBucket style ( ":sparkles: add new feature (pull request #123)", ":sparkles: add new feature", "#123", ), # Both a linked merge request and an issue footer (should return the linked merge request) (":bug: superfix (#123)\n\nCloses: #400", ":bug: superfix", "#123"), # None (":bug: superfix", ":bug: superfix", ""), # None but includes an issue footer it should not be considered a linked merge request (":bug: superfix\n\nCloses: #400", ":bug: superfix", ""), ], ) def test_parser_return_linked_merge_request_from_commit_message( default_emoji_parser: EmojiCommitParser, message: str, subject: str, merge_request_number: str, make_commit_obj: MakeCommitObjFn, ): parsed_results = default_emoji_parser.parse(make_commit_obj(message)) assert isinstance(parsed_results, Iterable) result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert merge_request_number == result.linked_merge_request assert subject == result.descriptions[0] @pytest.mark.parametrize( "message, linked_issues", [ *[ # GitHub, Gitea, GitLab style ( f":sparkles: (parser) add magic parser\n\n{footer}", linked_issues, ) for footer_prefix in SUPPORTED_ISSUE_CLOSURE_PREFIXES for footer, linked_issues in [ # Single issue ( f"{footer_prefix.capitalize()}: #555", ["#555"], ), # Git Footer style (capitalized) (f"{footer_prefix.lower()}: #555", 
["#555"]), # lowercase prefix (f"{footer_prefix.upper()}: #555", ["#555"]), # uppercase prefix # Mulitple issues (variant 1: list with one prefix, not supported by GitHub) ( f"{footer_prefix}: #555,#444", ["#444", "#555"], ), # Comma separated (w/o space) ( f"{footer_prefix}: #555, #444", ["#444", "#555"], ), # Comma separated (w/ space) ( f"{footer_prefix}: #555 , #444", ["#444", "#555"], ), # Comma separated (w/ extra space) (f"{footer_prefix}: #555 #444", ["#444", "#555"]), # Space separated ( f"{footer_prefix}: #555;#444", ["#444", "#555"], ), # semicolon separated (w/o space) ( f"{footer_prefix}: #555; #444", ["#444", "#555"], ), # semicolon separated (w/ space) ( f"{footer_prefix}: #555 ; #444", ["#444", "#555"], ), # semicolon separated (w/ extra space) ( f"{footer_prefix}: #555/#444", ["#444", "#555"], ), # slash separated (w/o space) ( f"{footer_prefix}: #555/ #444", ["#444", "#555"], ), # slash separated (w/ space) ( f"{footer_prefix}: #555 / #444", ["#444", "#555"], ), # slash separated (w/ extra space) ( f"{footer_prefix}: #555Ƽ", ["#444", "#555"], ), # ampersand separated (w/o space) ( f"{footer_prefix}: #555& #444", ["#444", "#555"], ), # ampersand separated (w/ space) ( f"{footer_prefix}: #555 & #444", ["#444", "#555"], ), # ampersand separated (w/ extra space) (f"{footer_prefix}: #555 and #444", ["#444", "#555"]), # and separated ( f"{footer_prefix}: #555, #444, and #333", ["#333", "#444", "#555"], ), # and separated # Mulitple issues (variant 2: multiple footers, supported by GitHub) (f"{footer_prefix}: #555\n{footer_prefix}: #444", ["#444", "#555"]), # More than 2 issues ( f"{footer_prefix}: #555, #444, #333", ["#333", "#444", "#555"], ), # More than 2 issues (force numerical sort) ( f"{footer_prefix}: #555, #3333, #444", ["#444", "#555", "#3333"], ), # Single issue listed multiple times (f"{footer_prefix}: #555, #555", ["#555"]), # Multiple footers with the same issue (f"{footer_prefix}: #555\n{footer_prefix}: #555", ["#555"]), # Multiple issues 
via multiple inline git footers (f"{footer_prefix}: #555, {footer_prefix}: #444", ["#444", "#555"]), # Multiple valid footers ( str.join( "\n", [ f"{footer_prefix}: #555", "Signed-off-by: johndoe ", f"{footer_prefix}: #444", ], ), ["#444", "#555"], ), # ----------------------------------------- Invalid Sets ----------------------------------------- # # Must have colon because it is a git footer, these will not return a linked issue (f"{footer_prefix} #666", []), (f"{footer_prefix} #666, #777", []), # Invalid Multiple issues (although it is supported by GitHub, it is not supported by the parser) (f"{footer_prefix} #666, {footer_prefix} #777", []), # Invalid 'and' separation (f"{footer_prefix}: #666and#777", ["#666and#777"]), # Invalid prefix ("ref: #666", []), # body mentions an issue and has a different git footer ( "In #666, the devils in the details...\n\nSigned-off-by: johndoe ", [], ), ] ], *[ # JIRA style ( f":sparkles: (parser) add magic parser\n\n{footer}", linked_issues, ) for footer_prefix in SUPPORTED_ISSUE_CLOSURE_PREFIXES for footer, linked_issues in [ # Single issue ( f"{footer_prefix.capitalize()}: ABC-555", ["ABC-555"], ), # Git Footer style (capitalized) (f"{footer_prefix.lower()}: ABC-555", ["ABC-555"]), # lowercase prefix (f"{footer_prefix.upper()}: ABC-555", ["ABC-555"]), # uppercase prefix # Mulitple issues (variant 1: list with one prefix, not supported by GitHub) ( f"{footer_prefix}: ABC-555,ABC-444", ["ABC-444", "ABC-555"], ), # Comma separated (w/o space) ( f"{footer_prefix}: ABC-555, ABC-444", ["ABC-444", "ABC-555"], ), # Comma separated (w/ space) ( f"{footer_prefix}: ABC-555 , ABC-444", ["ABC-444", "ABC-555"], ), # Comma separated (w/ extra space) ( f"{footer_prefix}: ABC-555 ABC-444", ["ABC-444", "ABC-555"], ), # Space separated ( f"{footer_prefix}: ABC-555;ABC-444", ["ABC-444", "ABC-555"], ), # semicolon separated (w/o space) ( f"{footer_prefix}: ABC-555; ABC-444", ["ABC-444", "ABC-555"], ), # semicolon separated (w/ space) ( 
f"{footer_prefix}: ABC-555 ; ABC-444", ["ABC-444", "ABC-555"], ), # semicolon separated (w/ extra space) ( f"{footer_prefix}: ABC-555/ABC-444", ["ABC-444", "ABC-555"], ), # slash separated (w/o space) ( f"{footer_prefix}: ABC-555/ ABC-444", ["ABC-444", "ABC-555"], ), # slash separated (w/ space) ( f"{footer_prefix}: ABC-555 / ABC-444", ["ABC-444", "ABC-555"], ), # slash separated (w/ extra space) ( f"{footer_prefix}: ABC-555&ABC-444", ["ABC-444", "ABC-555"], ), # ampersand separated (w/o space) ( f"{footer_prefix}: ABC-555& ABC-444", ["ABC-444", "ABC-555"], ), # ampersand separated (w/ space) ( f"{footer_prefix}: ABC-555 & ABC-444", ["ABC-444", "ABC-555"], ), # ampersand separated (w/ extra space) ( f"{footer_prefix}: ABC-555 and ABC-444", ["ABC-444", "ABC-555"], ), # and separated ( f"{footer_prefix}: ABC-555, ABC-444, and ABC-333", ["ABC-333", "ABC-444", "ABC-555"], ), # and separated # Mulitple issues (variant 2: multiple footers, supported by GitHub) ( f"{footer_prefix}: ABC-555\n{footer_prefix}: ABC-444", ["ABC-444", "ABC-555"], ), # More than 2 issues ( f"{footer_prefix}: ABC-555, ABC-444, ABC-333", ["ABC-333", "ABC-444", "ABC-555"], ), # More than 2 issues (force numerical sort) ( f"{footer_prefix}: ABC-555, ABC-3333, ABC-444", ["ABC-444", "ABC-555", "ABC-3333"], ), # Single issue listed multiple times (f"{footer_prefix}: ABC-555, ABC-555", ["ABC-555"]), # Multiple footers with the same issue (f"{footer_prefix}: ABC-555\n{footer_prefix}: ABC-555", ["ABC-555"]), # Multiple issues via multiple inline git footers ( f"{footer_prefix}: ABC-666, {footer_prefix}: ABC-777", ["ABC-666", "ABC-777"], ), # Multiple valid footers ( str.join( "\n", [ f"{footer_prefix}: ABC-555", "Signed-off-by: johndoe ", f"{footer_prefix}: ABC-444", ], ), ["ABC-444", "ABC-555"], ), # ----------------------------------------- Invalid Sets ----------------------------------------- # # Must have colon because it is a git footer, these will not return a linked issue (f"{footer_prefix} 
ABC-666", []), (f"{footer_prefix} ABC-666, ABC-777", []), # Invalid Multiple issues (although it is supported by GitHub, it is not supported by the parser) (f"{footer_prefix} ABC-666, {footer_prefix} ABC-777", []), # Invalid 'and' separation (f"{footer_prefix}: ABC-666andABC-777", ["ABC-666andABC-777"]), # Invalid prefix ("ref: ABC-666", []), # body mentions an issue and has a different git footer ( "In ABC-666, the devils in the details...\n\nSigned-off-by: johndoe ", [], ), ] ], *[ ( f":sparkles: (parser) add magic parser\n\n{footer}", linked_issues, ) for footer, linked_issues in [ # Multiple footers with the same issue but different prefixes ("Resolves: #555\nfix: #444", ["#444", "#555"]), # Whitespace padded footer (" Resolves: #555\n", ["#555"]), ] ], ( # Only grabs the issue reference when there is a GitHub PR reference in the subject ":sparkles: (parser) add magic parser (#123)\n\nCloses: #555", ["#555"], ), # Does not grab an issue when there is only a GitHub PR reference in the subject (":sparkles: (parser) add magic parser (#123)", []), # Does not grab an issue when there is only a Bitbucket PR reference in the subject (":sparkles: (parser) add magic parser (pull request #123)", []), ], ) def test_parser_return_linked_issues_from_commit_message( default_emoji_parser: EmojiCommitParser, message: str, linked_issues: Sequence[str], make_commit_obj: MakeCommitObjFn, ): # Setup: Enable parsing of linked issues parser = EmojiCommitParser( options=EmojiParserOptions( **{ **default_emoji_parser.options.__dict__, "parse_linked_issues": True, } ) ) # Action parsed_results = parser.parse(make_commit_obj(message)) assert isinstance(parsed_results, Iterable) assert len(parsed_results) == 1 # Evaluate (expected -> actual) result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert tuple(linked_issues) == result.linked_issues @pytest.mark.parametrize( "message, notices", [ pytest.param( message, notices, id=test_id, ) for test_id, message, 
notices in [ ( "single notice", dedent( """\ :bug:(parser): fix regex in emoji parser NOTICE: This is a notice """ ), ["This is a notice"], ), ( "multiline notice", dedent( """\ :bug:(parser): fix regex in emoji parser NOTICE: This is a notice that is longer than other notices """ ), ["This is a notice that is longer than other notices"], ), ( "multiple notices", dedent( """\ :bug:(parser): fix regex in emoji parser NOTICE: This is a notice NOTICE: This is a second notice """ ), ["This is a notice", "This is a second notice"], ), ( "notice with other footer", dedent( """\ :bug:(parser): fix regex in emoji parser BREAKING CHANGE: This is a breaking change NOTICE: This is a notice """ ), ["This is a notice"], ), ] ], ) def test_parser_return_release_notices_from_commit_message( default_emoji_parser: EmojiCommitParser, message: str, notices: Sequence[str], make_commit_obj: MakeCommitObjFn, ): parsed_results = default_emoji_parser.parse(make_commit_obj(message)) assert isinstance(parsed_results, Iterable) assert len(parsed_results) == 1 result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert tuple(notices) == result.release_notices full_description = str.join("\n\n", result.descriptions) full_notice = str.join("\n\n", result.release_notices) assert full_notice not in full_description @pytest.mark.parametrize( "commit_message, expected_commit_details", [ pytest.param( commit_message, expected_commit_details, id=test_id, ) for test_id, commit_message, expected_commit_details in [ ( "Single commit squashed via BitBucket PR resolution", dedent( """\ Merged in feat/my-awesome-stuff (pull request #10) :bug:(release-config): some commit subject An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author """ ), [ { "bump": LevelBump.NO_RELEASE, "type": "Other", "descriptions": ["Merged in feat/my-awesome-stuff"], "linked_merge_request": "#10", }, { "bump": LevelBump.PATCH, "type": ":bug:", 
"scope": "release-config", "descriptions": [ ":bug:(release-config): some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Signed-off-by: author ", ], "linked_issues": ("#12",), "linked_merge_request": "#10", }, ], ), ( "Multiple commits squashed via BitBucket PR resolution", dedent( """\ Merged in feat/my-awesome-stuff (pull request #10) :bug:(release-config): some commit subject An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author :sparkles: implemented searching gizmos by keyword :memo:(parser): add new parser pattern :boom::bug: changed option name A breaking change description Closes: #555 invalid non-conventional formatted commit """ ), [ { "bump": LevelBump.NO_RELEASE, "type": "Other", "descriptions": ["Merged in feat/my-awesome-stuff"], "linked_merge_request": "#10", }, { "bump": LevelBump.PATCH, "type": ":bug:", "scope": "release-config", "descriptions": [ ":bug:(release-config): some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Signed-off-by: author ", ], "linked_issues": ("#12",), "linked_merge_request": "#10", }, { "bump": LevelBump.MINOR, "type": ":sparkles:", "descriptions": [ ":sparkles: implemented searching gizmos by keyword" ], "linked_merge_request": "#10", }, { "bump": LevelBump.NO_RELEASE, "type": ":memo:", "scope": "parser", "descriptions": [ ":memo:(parser): add new parser pattern", ], "linked_merge_request": "#10", }, { "bump": LevelBump.MAJOR, "type": ":boom:", "scope": "", "descriptions": [ ":boom::bug: changed option name", ], "breaking_descriptions": [ "A breaking change description", # This is a bit unusual but its because there is no identifier that will # identify this as a separate commit so it gets included in the previous commit "invalid non-conventional formatted commit", ], "linked_issues": ("#555",), "linked_merge_request": "#10", }, ], 
), ] ], ) def test_parser_squashed_commit_bitbucket_squash_style( default_emoji_parser: EmojiCommitParser, make_commit_obj: MakeCommitObjFn, commit_message: str, expected_commit_details: Sequence[dict | None], ): # Setup: Enable squash commit parsing parser = EmojiCommitParser( options=EmojiParserOptions( **{ **default_emoji_parser.options.__dict__, "parse_squash_commits": True, "parse_linked_issues": True, } ) ) # Build the commit object and parse it the_commit = make_commit_obj(commit_message) parsed_results = parser.parse(the_commit) # Validate the results assert isinstance(parsed_results, Iterable) assert ( len(expected_commit_details) == len(parsed_results) ), f"Expected {len(expected_commit_details)} parsed results, but got {len(parsed_results)}" for result, expected in zip(parsed_results, expected_commit_details): if expected is None: assert isinstance(result, ParseError) continue assert isinstance(result, ParsedCommit) # Required assert expected["bump"] == result.bump assert expected["type"] == result.type # Optional assert expected.get("scope", "") == result.scope # TODO: v11 change to tuples assert expected.get("descriptions", []) == result.descriptions assert expected.get("breaking_descriptions", []) == result.breaking_descriptions assert expected.get("linked_issues", ()) == result.linked_issues assert expected.get("linked_merge_request", "") == result.linked_merge_request @pytest.mark.parametrize( "commit_message, expected_commit_details", [ pytest.param( commit_message, expected_commit_details, id=test_id, ) for test_id, commit_message, expected_commit_details in [ ( "Single commit squashed via manual Git squash merge", dedent( """\ Squashed commit of the following: commit 63ec09b9e844e616dcaa7bae35a0b66671b59fbb Author: author Date: Sun Jan 19 12:05:23 2025 +0000 :bug:(release-config): some commit subject An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author """ ), [ { "bump": 
LevelBump.PATCH, "type": ":bug:", "scope": "release-config", "descriptions": [ ":bug:(release-config): some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Signed-off-by: author ", ], "linked_issues": ("#12",), } ], ), ( "Multiple commits squashed via manual Git squash merge", dedent( """\ Squashed commit of the following: commit 63ec09b9e844e616dcaa7bae35a0b66671b59fbb Author: author Date: Sun Jan 19 12:05:23 2025 +0000 :bug:(release-config): some commit subject An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author commit 1f34769bf8352131ad6f4879b8c47becf3c7aa69 Author: author Date: Sat Jan 18 10:13:53 2025 +0000 :sparkles: implemented searching gizmos by keyword commit b2334a64a11ef745a17a2a4034f651e08e8c45a6 Author: author Date: Sat Jan 18 10:13:53 2025 +0000 :memo:(parser): add new parser pattern commit 5f0292fb5a88c3a46e4a02bec35b85f5228e8e51 Author: author Date: Sat Jan 18 10:13:53 2025 +0000 :boom::bug: changed option name A breaking change description Closes: #555 commit 2f314e7924be161cfbf220d3b6e2a6189a3b5609 Author: author Date: Sat Jan 18 10:13:53 2025 +0000 invalid non-conventional formatted commit """ ), [ { "bump": LevelBump.PATCH, "type": ":bug:", "scope": "release-config", "descriptions": [ ":bug:(release-config): some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Signed-off-by: author ", ], "linked_issues": ("#12",), }, { "bump": LevelBump.MINOR, "type": ":sparkles:", "descriptions": [ ":sparkles: implemented searching gizmos by keyword" ], }, { "bump": LevelBump.NO_RELEASE, "type": ":memo:", "scope": "parser", "descriptions": [ ":memo:(parser): add new parser pattern", ], }, { "bump": LevelBump.MAJOR, "type": ":boom:", "descriptions": [ ":boom::bug: changed option name", ], "breaking_descriptions": [ "A breaking change description", ], "linked_issues": 
("#555",), }, { "bump": LevelBump.NO_RELEASE, "type": "Other", "descriptions": ["invalid non-conventional formatted commit"], }, ], ), ] ], ) def test_parser_squashed_commit_git_squash_style( default_emoji_parser: EmojiCommitParser, make_commit_obj: MakeCommitObjFn, commit_message: str, expected_commit_details: Sequence[dict | None], ): # Setup: Enable squash commit parsing parser = EmojiCommitParser( options=EmojiParserOptions( **{ **default_emoji_parser.options.__dict__, "parse_squash_commits": True, "parse_linked_issues": True, } ) ) # Build the commit object and parse it the_commit = make_commit_obj(commit_message) parsed_results = parser.parse(the_commit) # Validate the results assert isinstance(parsed_results, Iterable) assert ( len(expected_commit_details) == len(parsed_results) ), f"Expected {len(expected_commit_details)} parsed results, but got {len(parsed_results)}" for result, expected in zip(parsed_results, expected_commit_details): if expected is None: assert isinstance(result, ParseError) continue assert isinstance(result, ParsedCommit) # Required assert expected["bump"] == result.bump assert expected["type"] == result.type # Optional assert expected.get("scope", "") == result.scope # TODO: v11 change to tuples assert expected.get("descriptions", []) == result.descriptions assert expected.get("breaking_descriptions", []) == result.breaking_descriptions assert expected.get("linked_issues", ()) == result.linked_issues assert expected.get("linked_merge_request", "") == result.linked_merge_request @pytest.mark.parametrize( "commit_message, expected_commit_details", [ pytest.param( commit_message, expected_commit_details, id=test_id, ) for test_id, commit_message, expected_commit_details in [ ( "Single commit squashed via GitHub PR resolution", dedent( """\ :bug:(release-config): some commit subject (#10) An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author """ ), [ { "bump": 
LevelBump.PATCH, "type": ":bug:", "scope": "release-config", "descriptions": [ ":bug:(release-config): some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Signed-off-by: author ", ], "linked_issues": ("#12",), "linked_merge_request": "#10", }, ], ), ( "Multiple commits squashed via GitHub PR resolution", dedent( """\ :bug:(release-config): some commit subject (#10) An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author * :sparkles: implemented searching gizmos by keyword * :memo:(parser): add new parser pattern * :boom::bug: changed option name A breaking change description Closes: #555 * invalid non-conventional formatted commit """ ), [ { "bump": LevelBump.PATCH, "type": ":bug:", "scope": "release-config", "descriptions": [ ":bug:(release-config): some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Signed-off-by: author ", ], "linked_issues": ("#12",), "linked_merge_request": "#10", }, { "bump": LevelBump.MINOR, "type": ":sparkles:", "descriptions": [ ":sparkles: implemented searching gizmos by keyword" ], "linked_merge_request": "#10", }, { "bump": LevelBump.NO_RELEASE, "type": ":memo:", "scope": "parser", "descriptions": [ ":memo:(parser): add new parser pattern", ], "linked_merge_request": "#10", }, { "bump": LevelBump.MAJOR, "type": ":boom:", "scope": "", "descriptions": [ ":boom::bug: changed option name", ], "breaking_descriptions": [ "A breaking change description", # This is a bit unusual but its because there is no identifier that will # identify this as a separate commit so it gets included in the previous commit "* invalid non-conventional formatted commit", ], "linked_issues": ("#555",), "linked_merge_request": "#10", }, ], ), ] ], ) def test_parser_squashed_commit_github_squash_style( default_emoji_parser: EmojiCommitParser, make_commit_obj: MakeCommitObjFn, 
commit_message: str, expected_commit_details: Sequence[dict | None], ): # Setup: Enable squash commit parsing parser = EmojiCommitParser( options=EmojiParserOptions( **{ **default_emoji_parser.options.__dict__, "parse_squash_commits": True, "parse_linked_issues": True, } ) ) # Build the commit object and parse it the_commit = make_commit_obj(commit_message) parsed_results = parser.parse(the_commit) # Validate the results assert isinstance(parsed_results, Iterable) assert ( len(expected_commit_details) == len(parsed_results) ), f"Expected {len(expected_commit_details)} parsed results, but got {len(parsed_results)}" for result, expected in zip(parsed_results, expected_commit_details): if expected is None: assert isinstance(result, ParseError) continue assert isinstance(result, ParsedCommit) # Required assert expected["bump"] == result.bump assert expected["type"] == result.type # Optional assert expected.get("scope", "") == result.scope # TODO: v11 change to tuples assert expected.get("descriptions", []) == result.descriptions assert expected.get("breaking_descriptions", []) == result.breaking_descriptions assert expected.get("linked_issues", ()) == result.linked_issues assert expected.get("linked_merge_request", "") == result.linked_merge_request def test_parser_ignore_merge_commit( default_emoji_parser: EmojiCommitParser, make_commit_obj: MakeCommitObjFn, ): # Setup: Enable parsing of linked issues parser = EmojiCommitParser( options=EmojiParserOptions( **{ **default_emoji_parser.options.__dict__, "ignore_merge_commits": True, } ) ) base_commit = make_commit_obj("Merge branch 'fix/fix-feature' into 'main'") incomming_commit = make_commit_obj("feat: add a new feature") # Setup: Create a merge commit merge_commit = make_commit_obj("Merge branch 'feat/add-new-feature' into 'main'") merge_commit.parents = [base_commit, incomming_commit] # Action parsed_result = parser.parse(merge_commit) assert isinstance(parsed_result, ParseError) assert "Ignoring merge commit" in 
parsed_result.error python-semantic-release-10.4.1/tests/unit/semantic_release/commit_parser/test_parsed_commit.py000066400000000000000000000014321506116242600330670ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING from semantic_release.commit_parser import ParsedCommit from semantic_release.version.version import LevelBump if TYPE_CHECKING: from tests.conftest import MakeCommitObjFn def test_parsed_commit_computed_properties(make_commit_obj: MakeCommitObjFn): message = "feat(parser): Add new parser pattern" commit = make_commit_obj(message) parsed_commit = ParsedCommit( bump=LevelBump.MINOR, type="feature", scope="parser", descriptions=["Add new parser pattern"], breaking_descriptions=[], commit=commit, ) assert message == parsed_commit.message assert commit.hexsha == parsed_commit.hexsha assert commit.hexsha[:7] == parsed_commit.short_hash python-semantic-release-10.4.1/tests/unit/semantic_release/commit_parser/test_scipy.py000066400000000000000000001426741506116242600314060ustar00rootroot00000000000000# ruff: noqa: SIM300 from __future__ import annotations from re import compile as regexp from textwrap import dedent from typing import TYPE_CHECKING, Iterable, Sequence import pytest from semantic_release.commit_parser.scipy import ( ScipyCommitParser, ScipyParserOptions, ) from semantic_release.commit_parser.token import ParsedCommit, ParseError from semantic_release.enums import LevelBump from tests.const import SUPPORTED_ISSUE_CLOSURE_PREFIXES if TYPE_CHECKING: from tests.conftest import MakeCommitObjFn unwordwrap = regexp(r"((? 
""" ), [ None, { "bump": LevelBump.PATCH, "type": "fix", "scope": "release-config", "descriptions": [ "some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Signed-off-by: author ", ], "linked_issues": ("#12",), "linked_merge_request": "#10", }, ], ), ( "Multiple commits squashed via BitBucket PR resolution", dedent( """\ Merged in feat/my-awesome-stuff (pull request #10) BUG:release-config: some commit subject An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author ENH: implemented searching gizmos by keyword DOC: parser: add new parser pattern API:cli: changed option name A breaking change description Closes: #555 invalid non-conventional formatted commit """ ), [ None, { "bump": LevelBump.PATCH, "type": "fix", "scope": "release-config", "descriptions": [ "some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Signed-off-by: author ", ], "linked_issues": ("#12",), "linked_merge_request": "#10", }, { "bump": LevelBump.MINOR, "type": "feature", "descriptions": ["implemented searching gizmos by keyword"], "linked_merge_request": "#10", }, { "bump": LevelBump.NO_RELEASE, "type": "documentation", "scope": "parser", "descriptions": [ "add new parser pattern", ], "linked_merge_request": "#10", }, { "bump": LevelBump.MAJOR, "type": "breaking", "scope": "cli", "descriptions": [ "changed option name", ], "breaking_descriptions": [ "A breaking change description", # This is a bit unusual but its because there is no identifier that will # identify this as a separate commit so it gets included in the previous commit "invalid non-conventional formatted commit", ], "linked_issues": ("#555",), "linked_merge_request": "#10", }, ], ), ] ], ) def test_parser_squashed_commit_bitbucket_squash_style( default_scipy_parser: ScipyCommitParser, make_commit_obj: MakeCommitObjFn, commit_message: str, 
expected_commit_details: Sequence[dict | None], ): # Setup: Enable squash commit parsing parser = ScipyCommitParser( options=ScipyParserOptions( **{ **default_scipy_parser.options.__dict__, "parse_squash_commits": True, } ) ) # Build the commit object and parse it the_commit = make_commit_obj(commit_message) parsed_results = parser.parse(the_commit) # Validate the results assert isinstance(parsed_results, Iterable) assert ( len(expected_commit_details) == len(parsed_results) ), f"Expected {len(expected_commit_details)} parsed results, but got {len(parsed_results)}" for result, expected in zip(parsed_results, expected_commit_details): if expected is None: assert isinstance(result, ParseError) continue assert isinstance(result, ParsedCommit) # Required assert expected["bump"] == result.bump assert expected["type"] == result.type # Optional assert expected.get("scope", "") == result.scope # TODO: v11 change to tuples assert expected.get("descriptions", []) == result.descriptions assert expected.get("breaking_descriptions", []) == result.breaking_descriptions assert expected.get("linked_issues", ()) == result.linked_issues assert expected.get("linked_merge_request", "") == result.linked_merge_request @pytest.mark.parametrize( "commit_message, expected_commit_details", [ pytest.param( commit_message, expected_commit_details, id=test_id, ) for test_id, commit_message, expected_commit_details in [ ( "Single commit squashed via manual Git squash merge", dedent( """\ Squashed commit of the following: commit 63ec09b9e844e616dcaa7bae35a0b66671b59fbb Author: author Date: Sun Jan 19 12:05:23 2025 +0000 BUG: release-config: some commit subject An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author """ ), [ { "bump": LevelBump.PATCH, "type": "fix", "scope": "release-config", "descriptions": [ "some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", 
"Signed-off-by: author ", ], "linked_issues": ("#12",), } ], ), ( "Multiple commits squashed via manual Git squash merge", dedent( """\ Squashed commit of the following: commit 63ec09b9e844e616dcaa7bae35a0b66671b59fbb Author: author Date: Sun Jan 19 12:05:23 2025 +0000 BUG: release-config: some commit subject An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author commit 1f34769bf8352131ad6f4879b8c47becf3c7aa69 Author: author Date: Sat Jan 18 10:13:53 2025 +0000 ENH: implemented searching gizmos by keyword commit b2334a64a11ef745a17a2a4034f651e08e8c45a6 Author: author Date: Sat Jan 18 10:13:53 2025 +0000 DOC: parser: add new parser pattern commit 5f0292fb5a88c3a46e4a02bec35b85f5228e8e51 Author: author Date: Sat Jan 18 10:13:53 2025 +0000 API:cli: changed option name A breaking change description Closes: #555 commit 2f314e7924be161cfbf220d3b6e2a6189a3b5609 Author: author Date: Sat Jan 18 10:13:53 2025 +0000 invalid non-conventional formatted commit """ ), [ { "bump": LevelBump.PATCH, "type": "fix", "scope": "release-config", "descriptions": [ "some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Signed-off-by: author ", ], "linked_issues": ("#12",), }, { "bump": LevelBump.MINOR, "type": "feature", "descriptions": ["implemented searching gizmos by keyword"], }, { "bump": LevelBump.NO_RELEASE, "type": "documentation", "scope": "parser", "descriptions": [ "add new parser pattern", ], }, { "bump": LevelBump.MAJOR, "type": "breaking", "scope": "cli", "descriptions": [ "changed option name", ], "breaking_descriptions": [ "A breaking change description", ], "linked_issues": ("#555",), }, None, ], ), ] ], ) def test_parser_squashed_commit_git_squash_style( default_scipy_parser: ScipyCommitParser, make_commit_obj: MakeCommitObjFn, commit_message: str, expected_commit_details: Sequence[dict | None], ): # Setup: Enable squash commit parsing parser = 
ScipyCommitParser( options=ScipyParserOptions( **{ **default_scipy_parser.options.__dict__, "parse_squash_commits": True, } ) ) # Build the commit object and parse it the_commit = make_commit_obj(commit_message) parsed_results = parser.parse(the_commit) # Validate the results assert isinstance(parsed_results, Iterable) assert ( len(expected_commit_details) == len(parsed_results) ), f"Expected {len(expected_commit_details)} parsed results, but got {len(parsed_results)}" for result, expected in zip(parsed_results, expected_commit_details): if expected is None: assert isinstance(result, ParseError) continue assert isinstance(result, ParsedCommit) # Required assert expected["bump"] == result.bump assert expected["type"] == result.type # Optional assert expected.get("scope", "") == result.scope # TODO: v11 change to tuples assert expected.get("descriptions", []) == result.descriptions assert expected.get("breaking_descriptions", []) == result.breaking_descriptions assert expected.get("linked_issues", ()) == result.linked_issues assert expected.get("linked_merge_request", "") == result.linked_merge_request @pytest.mark.parametrize( "commit_message, expected_commit_details", [ pytest.param( commit_message, expected_commit_details, id=test_id, ) for test_id, commit_message, expected_commit_details in [ ( "Single commit squashed via GitHub PR resolution", dedent( """\ BUG: release-config: some commit subject (#10) An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author """ ), [ { "bump": LevelBump.PATCH, "type": "fix", "scope": "release-config", "descriptions": [ "some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Signed-off-by: author ", ], "linked_issues": ("#12",), "linked_merge_request": "#10", }, ], ), ( "Multiple commits squashed via GitHub PR resolution", dedent( """\ BUG: release-config: some commit subject (#10) An additional description 
Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author * ENH: implemented searching gizmos by keyword * DOC: parser: add new parser pattern * API:cli: changed option name A breaking change description Closes: #555 * invalid non-conventional formatted commit """ ), [ { "bump": LevelBump.PATCH, "type": "fix", "scope": "release-config", "descriptions": [ "some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Signed-off-by: author ", ], "linked_issues": ("#12",), "linked_merge_request": "#10", }, { "bump": LevelBump.MINOR, "type": "feature", "descriptions": ["implemented searching gizmos by keyword"], "linked_merge_request": "#10", }, { "bump": LevelBump.NO_RELEASE, "type": "documentation", "scope": "parser", "descriptions": [ "add new parser pattern", ], "linked_merge_request": "#10", }, { "bump": LevelBump.MAJOR, "type": "breaking", "scope": "cli", "descriptions": [ "changed option name", ], "breaking_descriptions": [ "A breaking change description", # This is a bit unusual but its because there is no identifier that will # identify this as a separate commit so it gets included in the previous commit "* invalid non-conventional formatted commit", ], "linked_issues": ("#555",), "linked_merge_request": "#10", }, ], ), ] ], ) def test_parser_squashed_commit_github_squash_style( default_scipy_parser: ScipyCommitParser, make_commit_obj: MakeCommitObjFn, commit_message: str, expected_commit_details: Sequence[dict | None], ): # Setup: Enable squash commit parsing parser = ScipyCommitParser( options=ScipyParserOptions( **{ **default_scipy_parser.options.__dict__, "parse_squash_commits": True, } ) ) # Build the commit object and parse it the_commit = make_commit_obj(commit_message) parsed_results = parser.parse(the_commit) # Validate the results assert isinstance(parsed_results, Iterable) assert ( len(expected_commit_details) == len(parsed_results) ), f"Expected 
{len(expected_commit_details)} parsed results, but got {len(parsed_results)}" for result, expected in zip(parsed_results, expected_commit_details): if expected is None: assert isinstance(result, ParseError) continue assert isinstance(result, ParsedCommit) # Required assert expected["bump"] == result.bump assert expected["type"] == result.type # Optional assert expected.get("scope", "") == result.scope # TODO: v11 change to tuples assert expected.get("descriptions", []) == result.descriptions assert expected.get("breaking_descriptions", []) == result.breaking_descriptions assert expected.get("linked_issues", ()) == result.linked_issues assert expected.get("linked_merge_request", "") == result.linked_merge_request @pytest.mark.parametrize( "message, linked_issues", [ *[ # GitHub, Gitea, GitLab style ( f"ENH: add magic parser\n\n{footer}", linked_issues, ) for footer_prefix in SUPPORTED_ISSUE_CLOSURE_PREFIXES for footer, linked_issues in [ # Single issue ( f"{footer_prefix.capitalize()}: #555", ["#555"], ), # Git Footer style (capitalized) (f"{footer_prefix.lower()}: #555", ["#555"]), # lowercase prefix (f"{footer_prefix.upper()}: #555", ["#555"]), # uppercase prefix # Mulitple issues (variant 1: list with one prefix, not supported by GitHub) ( f"{footer_prefix}: #555,#444", ["#444", "#555"], ), # Comma separated (w/o space) ( f"{footer_prefix}: #555, #444", ["#444", "#555"], ), # Comma separated (w/ space) ( f"{footer_prefix}: #555 , #444", ["#444", "#555"], ), # Comma separated (w/ extra space) (f"{footer_prefix}: #555 #444", ["#444", "#555"]), # Space separated ( f"{footer_prefix}: #555;#444", ["#444", "#555"], ), # semicolon separated (w/o space) ( f"{footer_prefix}: #555; #444", ["#444", "#555"], ), # semicolon separated (w/ space) ( f"{footer_prefix}: #555 ; #444", ["#444", "#555"], ), # semicolon separated (w/ extra space) ( f"{footer_prefix}: #555/#444", ["#444", "#555"], ), # slash separated (w/o space) ( f"{footer_prefix}: #555/ #444", ["#444", "#555"], ), # 
slash separated (w/ space) ( f"{footer_prefix}: #555 / #444", ["#444", "#555"], ), # slash separated (w/ extra space) ( f"{footer_prefix}: #555Ƽ", ["#444", "#555"], ), # ampersand separated (w/o space) ( f"{footer_prefix}: #555& #444", ["#444", "#555"], ), # ampersand separated (w/ space) ( f"{footer_prefix}: #555 & #444", ["#444", "#555"], ), # ampersand separated (w/ extra space) (f"{footer_prefix}: #555 and #444", ["#444", "#555"]), # and separated ( f"{footer_prefix}: #555, #444, and #333", ["#333", "#444", "#555"], ), # and separated # Mulitple issues (variant 2: multiple footers, supported by GitHub) (f"{footer_prefix}: #555\n{footer_prefix}: #444", ["#444", "#555"]), # More than 2 issues ( f"{footer_prefix}: #555, #444, #333", ["#333", "#444", "#555"], ), # More than 2 issues (force numerical sort) ( f"{footer_prefix}: #555, #3333, #444", ["#444", "#555", "#3333"], ), # Single issue listed multiple times (f"{footer_prefix}: #555, #555", ["#555"]), # Multiple footers with the same issue (f"{footer_prefix}: #555\n{footer_prefix}: #555", ["#555"]), # Multiple issues via multiple inline git footers (f"{footer_prefix}: #555, {footer_prefix}: #444", ["#444", "#555"]), # Multiple valid footers ( str.join( "\n", [ f"{footer_prefix}: #555", "Signed-off-by: johndoe ", f"{footer_prefix}: #444", ], ), ["#444", "#555"], ), # ----------------------------------------- Invalid Sets ----------------------------------------- # # Must have colon because it is a git footer, these will not return a linked issue (f"{footer_prefix} #666", []), (f"{footer_prefix} #666, #777", []), # Invalid Multiple issues (although it is supported by GitHub, it is not supported by the parser) (f"{footer_prefix} #666, {footer_prefix} #777", []), # Invalid 'and' separation (f"{footer_prefix}: #666and#777", ["#666and#777"]), # Invalid prefix ("ref: #666", []), # body mentions an issue and has a different git footer ( "In #666, the devils in the details...\n\nSigned-off-by: johndoe ", [], ), ] ], *[ # 
JIRA style ( f"ENH: parser: add magic parser\n\n{footer}", linked_issues, ) for footer_prefix in SUPPORTED_ISSUE_CLOSURE_PREFIXES for footer, linked_issues in [ # Single issue ( f"{footer_prefix.capitalize()}: ABC-555", ["ABC-555"], ), # Git Footer style (capitalized) (f"{footer_prefix.lower()}: ABC-555", ["ABC-555"]), # lowercase prefix (f"{footer_prefix.upper()}: ABC-555", ["ABC-555"]), # uppercase prefix # Mulitple issues (variant 1: list with one prefix, not supported by GitHub) ( f"{footer_prefix}: ABC-555,ABC-444", ["ABC-444", "ABC-555"], ), # Comma separated (w/o space) ( f"{footer_prefix}: ABC-555, ABC-444", ["ABC-444", "ABC-555"], ), # Comma separated (w/ space) ( f"{footer_prefix}: ABC-555 , ABC-444", ["ABC-444", "ABC-555"], ), # Comma separated (w/ extra space) ( f"{footer_prefix}: ABC-555 ABC-444", ["ABC-444", "ABC-555"], ), # Space separated ( f"{footer_prefix}: ABC-555;ABC-444", ["ABC-444", "ABC-555"], ), # semicolon separated (w/o space) ( f"{footer_prefix}: ABC-555; ABC-444", ["ABC-444", "ABC-555"], ), # semicolon separated (w/ space) ( f"{footer_prefix}: ABC-555 ; ABC-444", ["ABC-444", "ABC-555"], ), # semicolon separated (w/ extra space) ( f"{footer_prefix}: ABC-555/ABC-444", ["ABC-444", "ABC-555"], ), # slash separated (w/o space) ( f"{footer_prefix}: ABC-555/ ABC-444", ["ABC-444", "ABC-555"], ), # slash separated (w/ space) ( f"{footer_prefix}: ABC-555 / ABC-444", ["ABC-444", "ABC-555"], ), # slash separated (w/ extra space) ( f"{footer_prefix}: ABC-555&ABC-444", ["ABC-444", "ABC-555"], ), # ampersand separated (w/o space) ( f"{footer_prefix}: ABC-555& ABC-444", ["ABC-444", "ABC-555"], ), # ampersand separated (w/ space) ( f"{footer_prefix}: ABC-555 & ABC-444", ["ABC-444", "ABC-555"], ), # ampersand separated (w/ extra space) ( f"{footer_prefix}: ABC-555 and ABC-444", ["ABC-444", "ABC-555"], ), # and separated ( f"{footer_prefix}: ABC-555, ABC-444, and ABC-333", ["ABC-333", "ABC-444", "ABC-555"], ), # and separated # Mulitple issues (variant 2: 
multiple footers, supported by GitHub) ( f"{footer_prefix}: ABC-555\n{footer_prefix}: ABC-444", ["ABC-444", "ABC-555"], ), # More than 2 issues ( f"{footer_prefix}: ABC-555, ABC-444, ABC-333", ["ABC-333", "ABC-444", "ABC-555"], ), # More than 2 issues (force numerical sort) ( f"{footer_prefix}: ABC-555, ABC-3333, ABC-444", ["ABC-444", "ABC-555", "ABC-3333"], ), # Single issue listed multiple times (f"{footer_prefix}: ABC-555, ABC-555", ["ABC-555"]), # Multiple footers with the same issue (f"{footer_prefix}: ABC-555\n{footer_prefix}: ABC-555", ["ABC-555"]), # Multiple issues via multiple inline git footers ( f"{footer_prefix}: ABC-666, {footer_prefix}: ABC-777", ["ABC-666", "ABC-777"], ), # Multiple valid footers ( str.join( "\n", [ f"{footer_prefix}: ABC-555", "Signed-off-by: johndoe ", f"{footer_prefix}: ABC-444", ], ), ["ABC-444", "ABC-555"], ), # ----------------------------------------- Invalid Sets ----------------------------------------- # # Must have colon because it is a git footer, these will not return a linked issue (f"{footer_prefix} ABC-666", []), (f"{footer_prefix} ABC-666, ABC-777", []), # Invalid Multiple issues (although it is supported by GitHub, it is not supported by the parser) (f"{footer_prefix} ABC-666, {footer_prefix} ABC-777", []), # Invalid 'and' separation (f"{footer_prefix}: ABC-666andABC-777", ["ABC-666andABC-777"]), # Invalid prefix ("ref: ABC-666", []), # body mentions an issue and has a different git footer ( "In ABC-666, the devils in the details...\n\nSigned-off-by: johndoe ", [], ), ] ], *[ ( f"ENH: parser: add magic parser\n\n{footer}", linked_issues, ) for footer, linked_issues in [ # Multiple footers with the same issue but different prefixes ("Resolves: #555\nfix: #444", ["#444", "#555"]), # Whitespace padded footer (" Resolves: #555\n", ["#555"]), ] ], ( # Only grabs the issue reference when there is a GitHub PR reference in the subject "ENH: parser: add magic parser (#123)\n\nCloses: #555", ["#555"], ), # Does not grab an 
issue when there is only a GitHub PR reference in the subject ("ENH: parser: add magic parser (#123)", []), # Does not grab an issue when there is only a Bitbucket PR reference in the subject ("ENH: parser: add magic parser (pull request #123)", []), ], ) def test_parser_return_linked_issues_from_commit_message( default_scipy_parser: ScipyCommitParser, message: str, linked_issues: Sequence[str], make_commit_obj: MakeCommitObjFn, ): parsed_results = default_scipy_parser.parse(make_commit_obj(message)) assert isinstance(parsed_results, Iterable) assert len(parsed_results) == 1 result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert tuple(linked_issues) == result.linked_issues @pytest.mark.parametrize( "message, notices", [ pytest.param( message, notices, id=test_id, ) for test_id, message, notices in [ ( "single notice", dedent( """\ BUG:parser: fix regex in scipy parser NOTICE: This is a notice """ ), ["This is a notice"], ), ( "multiline notice", dedent( """\ BUG:parser: fix regex in scipy parser NOTICE: This is a notice that is longer than other notices """ ), ["This is a notice that is longer than other notices"], ), ( "multiple notices", dedent( """\ BUG:parser: fix regex in scipy parser NOTICE: This is a notice NOTICE: This is a second notice """ ), ["This is a notice", "This is a second notice"], ), ( "notice with other footer", dedent( """\ BUG:parser: fix regex in scipy parser This is a breaking change NOTICE: This is a notice """ ), ["This is a notice"], ), ] ], ) def test_parser_return_release_notices_from_commit_message( default_scipy_parser: ScipyCommitParser, message: str, notices: Sequence[str], make_commit_obj: MakeCommitObjFn, ): parsed_results = default_scipy_parser.parse(make_commit_obj(message)) assert isinstance(parsed_results, Iterable) assert len(parsed_results) == 1 result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert tuple(notices) == result.release_notices full_description = 
str.join("\n\n", result.descriptions) full_notice = str.join("\n\n", result.release_notices) assert full_notice not in full_description def test_parser_ignore_merge_commit( default_scipy_parser: ScipyCommitParser, make_commit_obj: MakeCommitObjFn, ): # Setup: Enable parsing of linked issues parser = ScipyCommitParser( options=ScipyParserOptions( **{ **default_scipy_parser.options.__dict__, "ignore_merge_commits": True, } ) ) base_commit = make_commit_obj("Merge branch 'fix/fix-feature' into 'main'") incomming_commit = make_commit_obj("feat: add a new feature") # Setup: Create a merge commit merge_commit = make_commit_obj("Merge branch 'feat/add-new-feature' into 'main'") merge_commit.parents = [base_commit, incomming_commit] # Action parsed_result = parser.parse(merge_commit) assert isinstance(parsed_result, ParseError) assert "Ignoring merge commit" in parsed_result.error python-semantic-release-10.4.1/tests/unit/semantic_release/commit_parser/test_util.py000066400000000000000000000015041506116242600312160ustar00rootroot00000000000000import pytest from semantic_release.commit_parser.util import parse_paragraphs @pytest.mark.parametrize( "text, expected", [ ("", []), ("\n\n \n\n \n", []), # Unix (LF) - empty lines ("\r\n\r\n \r\n\r\n \n", []), # Windows (CRLF) - empty lines ("\n\nA\n\nB\n", ["A", "B"]), # Unix (LF) ("\r\n\r\nA\r\n\r\nB\n", ["A", "B"]), # Windows (CRLF) ( "Long\nexplanation\n\nfull of interesting\ndetails", ["Long explanation", "full of interesting details"], ), ( # Windows uses CRLF "Long\r\nexplanation\r\n\r\nfull of interesting\r\ndetails", ["Long explanation", "full of interesting details"], ), ], ) def test_parse_paragraphs(text, expected): assert parse_paragraphs(text) == expected 
python-semantic-release-10.4.1/tests/unit/semantic_release/hvcs/000077500000000000000000000000001506116242600247275ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/unit/semantic_release/hvcs/__init__.py000066400000000000000000000000001506116242600270260ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/unit/semantic_release/hvcs/test__base.py000066400000000000000000000034071506116242600274150ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING import pytest from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.hvcs._base import HvcsBase from tests.const import EXAMPLE_REPO_NAME, EXAMPLE_REPO_OWNER from tests.fixtures.git_repo import example_git_https_url, example_git_ssh_url if TYPE_CHECKING: from typing import Any, Callable class ArbitraryHvcs(HvcsBase): def remote_url(self, use_token: bool) -> str: return super().remote_url(use_token) def get_changelog_context_filters(self) -> tuple[Callable[..., Any], ...]: return super().get_changelog_context_filters() @pytest.mark.parametrize( "remote_url, repo_name", [ (lazy_fixture(example_git_ssh_url.__name__), EXAMPLE_REPO_NAME), (lazy_fixture(example_git_https_url.__name__), EXAMPLE_REPO_NAME), ("git@my.corp.custom.domain:very_serious/business.git", "business"), ], ) def test_get_repository_owner(remote_url, repo_name): client = ArbitraryHvcs(remote_url) assert client.repo_name == repo_name @pytest.mark.parametrize( "remote_url, owner", [ (lazy_fixture(example_git_ssh_url.__name__), EXAMPLE_REPO_OWNER), (lazy_fixture(example_git_https_url.__name__), EXAMPLE_REPO_OWNER), ("git@my.corp.custom.domain:very_serious/business.git", "very_serious"), ], ) def test_get_repository_name(remote_url, owner): client = ArbitraryHvcs(remote_url) assert client.owner == owner @pytest.mark.parametrize( "bad_url", [ "a" * 25, "https://a/b/c/d/.git", "https://github.com/wrong", "git@gitlab.com/somewhere", ], ) def 
test_hvcs_parse_error(bad_url: str): with pytest.raises(ValueError): ArbitraryHvcs(bad_url) python-semantic-release-10.4.1/tests/unit/semantic_release/hvcs/test_bitbucket.py000066400000000000000000000256211506116242600303220ustar00rootroot00000000000000from __future__ import annotations import os from unittest import mock import pytest from semantic_release.hvcs.bitbucket import Bitbucket from tests.const import EXAMPLE_HVCS_DOMAIN, EXAMPLE_REPO_NAME, EXAMPLE_REPO_OWNER @pytest.fixture def default_bitbucket_client(): remote_url = ( f"git@{Bitbucket.DEFAULT_DOMAIN}:{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git" ) return Bitbucket(remote_url=remote_url) @pytest.mark.parametrize( str.join( ", ", [ "patched_os_environ", "hvcs_domain", "hvcs_api_domain", "expected_hvcs_domain", "expected_api_url", "insecure", ], ), [ # No env vars as CI is handled by Bamboo or Jenkins (which require user defined defaults) # API paths are different in BitBucket Cloud (bitbucket.org) vs BitBucket Data Center ( # Default values (BitBucket Cloud) {}, None, None, f"https://{Bitbucket.DEFAULT_DOMAIN}", Bitbucket.DEFAULT_API_URL_CLOUD, False, ), ( # Explicitly set default values {}, Bitbucket.DEFAULT_DOMAIN, Bitbucket.DEFAULT_API_URL_CLOUD, f"https://{Bitbucket.DEFAULT_DOMAIN}", Bitbucket.DEFAULT_API_URL_CLOUD, False, ), ( # Explicitly set custom values with full api path {}, EXAMPLE_HVCS_DOMAIN, f"{EXAMPLE_HVCS_DOMAIN}/rest/api/1.0", f"https://{EXAMPLE_HVCS_DOMAIN}", f"https://{EXAMPLE_HVCS_DOMAIN}/rest/api/1.0", False, ), ( # Explicitly defined api as subdomain # POSSIBLY WRONG ASSUMPTION of Api path for BitBucket Server {}, f"https://{EXAMPLE_HVCS_DOMAIN}", f"https://api.{EXAMPLE_HVCS_DOMAIN}", f"https://{EXAMPLE_HVCS_DOMAIN}", f"https://api.{EXAMPLE_HVCS_DOMAIN}/rest/api/1.0", False, ), ( # Custom domain for on premise BitBucket Server (derive api endpoint) {}, EXAMPLE_HVCS_DOMAIN, None, f"https://{EXAMPLE_HVCS_DOMAIN}", f"https://{EXAMPLE_HVCS_DOMAIN}/rest/api/1.0", False, ), ( # Custom 
domain with path prefix {}, "special.custom.server/bitbucket", None, "https://special.custom.server/bitbucket", "https://special.custom.server/bitbucket/rest/api/1.0", False, ), ( # Allow insecure http connections explicitly {}, f"http://{EXAMPLE_HVCS_DOMAIN}", f"http://{EXAMPLE_HVCS_DOMAIN}/rest/api/1.0", f"http://{EXAMPLE_HVCS_DOMAIN}", f"http://{EXAMPLE_HVCS_DOMAIN}/rest/api/1.0", True, ), ( # Allow insecure http connections explicitly & imply insecure api domain {}, f"http://{EXAMPLE_HVCS_DOMAIN}", None, f"http://{EXAMPLE_HVCS_DOMAIN}", f"http://{EXAMPLE_HVCS_DOMAIN}/rest/api/1.0", True, ), ( # Infer insecure connection from user configuration {}, EXAMPLE_HVCS_DOMAIN, f"{EXAMPLE_HVCS_DOMAIN}/rest/api/1.0", f"http://{EXAMPLE_HVCS_DOMAIN}", f"http://{EXAMPLE_HVCS_DOMAIN}/rest/api/1.0", True, ), ( # Infer insecure connection from user configuration & imply insecure api domain {}, EXAMPLE_HVCS_DOMAIN, None, f"http://{EXAMPLE_HVCS_DOMAIN}", f"http://{EXAMPLE_HVCS_DOMAIN}/rest/api/1.0", True, ), ], ) @pytest.mark.parametrize( "remote_url", [ f"git@{Bitbucket.DEFAULT_DOMAIN}:{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git", f"https://{Bitbucket.DEFAULT_DOMAIN}/{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git", ], ) @pytest.mark.parametrize("token", ("abc123", None)) def test_bitbucket_client_init( patched_os_environ: dict[str, str], hvcs_domain: str | None, hvcs_api_domain: str | None, expected_hvcs_domain: str, expected_api_url: str, remote_url: str, token: str | None, insecure: bool, ): with mock.patch.dict(os.environ, patched_os_environ, clear=True): client = Bitbucket( remote_url=remote_url, hvcs_domain=hvcs_domain, hvcs_api_domain=hvcs_api_domain, token=token, allow_insecure=insecure, ) assert expected_hvcs_domain == str(client.hvcs_domain) assert expected_api_url == str(client.api_url) assert token == client.token assert remote_url == client._remote_url @pytest.mark.parametrize( "hvcs_domain, hvcs_api_domain, insecure", [ # Bad base domain schemes 
(f"ftp://{EXAMPLE_HVCS_DOMAIN}", None, False), (f"ftp://{EXAMPLE_HVCS_DOMAIN}", None, True), # Unallowed insecure connections when base domain is insecure (f"http://{EXAMPLE_HVCS_DOMAIN}", None, False), # Bad API domain schemes (None, f"ftp://api.{EXAMPLE_HVCS_DOMAIN}", False), (None, f"ftp://api.{EXAMPLE_HVCS_DOMAIN}", True), # Unallowed insecure connections when api domain is insecure (None, f"http://{EXAMPLE_HVCS_DOMAIN}", False), ], ) def test_bitbucket_client_init_with_invalid_scheme( hvcs_domain: str | None, hvcs_api_domain: str | None, insecure: bool, ): with pytest.raises(ValueError), mock.patch.dict(os.environ, {}, clear=True): Bitbucket( remote_url=f"https://{EXAMPLE_HVCS_DOMAIN}/{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git", hvcs_domain=hvcs_domain, hvcs_api_domain=hvcs_api_domain, allow_insecure=insecure, ) @pytest.mark.parametrize( "patched_os_environ, expected_owner, expected_name", [ ({}, None, None), ({"BITBUCKET_REPO_FULL_NAME": "path/to/repo/foo"}, "path/to/repo", "foo"), ], ) def test_bitbucket_get_repository_owner_and_name( default_bitbucket_client: Bitbucket, patched_os_environ: dict[str, str], expected_owner: str, expected_name: str, ): # expected results should be a tuple[namespace, repo_name] # when None, the default values are used which matches default_bitbucket_client's setup expected_result = ( expected_owner or EXAMPLE_REPO_OWNER, expected_name or EXAMPLE_REPO_NAME, ) with mock.patch.dict(os.environ, patched_os_environ, clear=True): # Execute in mocked environment result = default_bitbucket_client._get_repository_owner_and_name() # Evaluate (expected -> actual) assert expected_result == result def test_compare_url(default_bitbucket_client: Bitbucket): start_rev = "revA" end_rev = "revB" expected_url = ( "{server}/{owner}/{repo}/branches/compare/{from_rev}%0D{to_rev}".format( server=default_bitbucket_client.hvcs_domain.url, owner=default_bitbucket_client.owner, repo=default_bitbucket_client.repo_name, from_rev=start_rev, to_rev=end_rev, 
) ) actual_url = default_bitbucket_client.compare_url( from_rev=start_rev, to_rev=end_rev ) assert expected_url == actual_url @pytest.mark.parametrize( "patched_os_environ, use_token, token, remote_url, expected_auth_url", [ ( {"BITBUCKET_USER": "foo"}, False, "", f"git@{Bitbucket.DEFAULT_DOMAIN}:custom/example.git", f"git@{Bitbucket.DEFAULT_DOMAIN}:custom/example.git", ), ( {}, False, "aabbcc", f"git@{Bitbucket.DEFAULT_DOMAIN}:custom/example.git", f"git@{Bitbucket.DEFAULT_DOMAIN}:custom/example.git", ), ( {}, True, "aabbcc", f"git@{Bitbucket.DEFAULT_DOMAIN}:custom/example.git", f"https://x-token-auth:aabbcc@{Bitbucket.DEFAULT_DOMAIN}/custom/example.git", ), ( {"BITBUCKET_USER": "foo"}, False, "aabbcc", f"git@{Bitbucket.DEFAULT_DOMAIN}:custom/example.git", f"git@{Bitbucket.DEFAULT_DOMAIN}:custom/example.git", ), ( {"BITBUCKET_USER": "foo"}, True, "aabbcc", f"git@{Bitbucket.DEFAULT_DOMAIN}:custom/example.git", f"https://foo:aabbcc@{Bitbucket.DEFAULT_DOMAIN}/custom/example.git", ), ], ) def test_remote_url( default_bitbucket_client: Bitbucket, patched_os_environ: dict[str, str], use_token: bool, token: str, remote_url: str, expected_auth_url: str, ): with mock.patch.dict(os.environ, patched_os_environ, clear=True): default_bitbucket_client._remote_url = remote_url default_bitbucket_client.token = token assert expected_auth_url == default_bitbucket_client.remote_url( use_token=use_token ) def test_commit_hash_url(default_bitbucket_client: Bitbucket): sha = "244f7e11bcb1e1ce097db61594056bc2a32189a0" expected_url = "{server}/{owner}/{repo}/commits/{sha}".format( server=default_bitbucket_client.hvcs_domain, owner=default_bitbucket_client.owner, repo=default_bitbucket_client.repo_name, sha=sha, ) assert expected_url == default_bitbucket_client.commit_hash_url(sha) def test_commit_hash_url_w_custom_server(): """ Test the commit hash URL generation for a self-hosted Bitbucket server with prefix. 
ref: https://github.com/python-semantic-release/python-semantic-release/issues/1204 """ sha = "244f7e11bcb1e1ce097db61594056bc2a32189a0" expected_url = "{server}/{owner}/{repo}/commits/{sha}".format( server=f"https://{EXAMPLE_HVCS_DOMAIN}/projects/demo-foo", owner="foo", repo=EXAMPLE_REPO_NAME, sha=sha, ) with mock.patch.dict(os.environ, {}, clear=True): actual_url = Bitbucket( remote_url=f"https://{EXAMPLE_HVCS_DOMAIN}/projects/demo-foo/foo/{EXAMPLE_REPO_NAME}.git", hvcs_domain=f"https://{EXAMPLE_HVCS_DOMAIN}/projects/demo-foo", ).commit_hash_url(sha) assert expected_url == actual_url @pytest.mark.parametrize("pr_number", (666, "666", "#666")) def test_pull_request_url(default_bitbucket_client: Bitbucket, pr_number: int | str): expected_url = "{server}/{owner}/{repo}/pull-requests/{pr_number}".format( server=default_bitbucket_client.hvcs_domain, owner=default_bitbucket_client.owner, repo=default_bitbucket_client.repo_name, pr_number=str(pr_number).lstrip("#"), ) actual_url = default_bitbucket_client.pull_request_url(pr_number=pr_number) assert expected_url == actual_url python-semantic-release-10.4.1/tests/unit/semantic_release/hvcs/test_gitea.py000066400000000000000000000707721506116242600274460ustar00rootroot00000000000000from __future__ import annotations import fnmatch import glob import os import re from typing import TYPE_CHECKING from unittest import mock from urllib.parse import urlencode import pytest import requests_mock from requests import HTTPError, Response, Session from requests.auth import _basic_auth_str from semantic_release.hvcs.gitea import Gitea from semantic_release.hvcs.token_auth import TokenAuth from tests.const import ( EXAMPLE_HVCS_DOMAIN, EXAMPLE_REPO_NAME, EXAMPLE_REPO_OWNER, RELEASE_NOTES, ) from tests.fixtures.example_project import init_example_project if TYPE_CHECKING: from pathlib import Path from typing import Generator from tests.conftest import NetrcFileFn @pytest.fixture def default_gitea_client() -> Generator[Gitea, None, 
None]: remote_url = ( f"git@{Gitea.DEFAULT_DOMAIN}:{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git" ) with mock.patch.dict(os.environ, {}, clear=True): yield Gitea(remote_url=remote_url) @pytest.mark.parametrize( str.join( ", ", [ "patched_os_environ", "hvcs_domain", "expected_hvcs_domain", "insecure", ], ), # NOTE: Gitea does not have a different api domain [ # Default values ({}, None, f"https://{Gitea.DEFAULT_DOMAIN}", False), ( # Gather domain from environment {"GITEA_SERVER_URL": "https://special.custom.server/"}, None, "https://special.custom.server", False, ), ( # Custom domain with path prefix (derives from environment) {"GITEA_SERVER_URL": "https://special.custom.server/vcs/"}, None, "https://special.custom.server/vcs", False, ), ( # Ignore environment & use provided parameter value (ie from user config) {"GITEA_SERVER_URL": "https://special.custom.server/"}, f"https://{EXAMPLE_HVCS_DOMAIN}", f"https://{EXAMPLE_HVCS_DOMAIN}", False, ), ( # Allow insecure http connections explicitly {}, f"http://{EXAMPLE_HVCS_DOMAIN}", f"http://{EXAMPLE_HVCS_DOMAIN}", True, ), ( # Infer insecure connection from user configuration {}, EXAMPLE_HVCS_DOMAIN, f"http://{EXAMPLE_HVCS_DOMAIN}", True, ), ], ) @pytest.mark.parametrize( "remote_url", [ f"git@{Gitea.DEFAULT_DOMAIN}:{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git", f"https://{Gitea.DEFAULT_DOMAIN}/{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git", ], ) @pytest.mark.parametrize("token", ("abc123", None)) def test_gitea_client_init( patched_os_environ: dict[str, str], hvcs_domain: str | None, expected_hvcs_domain: str, remote_url: str, token: str | None, insecure: bool, ): with mock.patch.dict(os.environ, patched_os_environ, clear=True): client = Gitea( remote_url=remote_url, hvcs_domain=hvcs_domain, token=token, allow_insecure=insecure, ) # Evaluate (expected -> actual) assert expected_hvcs_domain == client.hvcs_domain.url assert f"{expected_hvcs_domain}/api/v1" == str(client.api_url) assert token == client.token assert remote_url 
== client._remote_url assert hasattr(client, "session") assert isinstance(getattr(client, "session", None), Session) @pytest.mark.parametrize( "hvcs_domain, insecure", [ (f"ftp://{EXAMPLE_HVCS_DOMAIN}", False), (f"ftp://{EXAMPLE_HVCS_DOMAIN}", True), (f"http://{EXAMPLE_HVCS_DOMAIN}", False), ], ) def test_gitea_client_init_with_invalid_scheme(hvcs_domain: str, insecure: bool): with pytest.raises(ValueError), mock.patch.dict(os.environ, {}, clear=True): Gitea( remote_url=f"https://{EXAMPLE_HVCS_DOMAIN}/{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git", hvcs_domain=hvcs_domain, allow_insecure=insecure, ) def test_gitea_get_repository_owner_and_name(default_gitea_client: Gitea): expected_result = (EXAMPLE_REPO_OWNER, EXAMPLE_REPO_NAME) # Execute method under test result = default_gitea_client._get_repository_owner_and_name() # Evaluate (expected -> actual) assert expected_result == result @pytest.mark.parametrize( "use_token, token, remote_url, expected_auth_url", [ ( False, "", f"git@{Gitea.DEFAULT_DOMAIN}:custom/example.git", f"git@{Gitea.DEFAULT_DOMAIN}:custom/example.git", ), ( True, "", f"git@{Gitea.DEFAULT_DOMAIN}:custom/example.git", f"git@{Gitea.DEFAULT_DOMAIN}:custom/example.git", ), ( False, "aabbcc", f"git@{Gitea.DEFAULT_DOMAIN}:custom/example.git", f"git@{Gitea.DEFAULT_DOMAIN}:custom/example.git", ), ( True, "aabbcc", f"git@{Gitea.DEFAULT_DOMAIN}:custom/example.git", f"https://aabbcc@{Gitea.DEFAULT_DOMAIN}/custom/example.git", ), ], ) def test_remote_url( default_gitea_client: Gitea, use_token: bool, token: str, remote_url: str, expected_auth_url: str, ): default_gitea_client._remote_url = remote_url default_gitea_client.token = token assert expected_auth_url == default_gitea_client.remote_url(use_token=use_token) def test_commit_hash_url(default_gitea_client: Gitea): sha = "hashashash" expected_url = "{server}/{owner}/{repo}/commit/{sha}".format( server=default_gitea_client.hvcs_domain.url, owner=default_gitea_client.owner, 
repo=default_gitea_client.repo_name, sha=sha, ) assert expected_url == default_gitea_client.commit_hash_url(sha) def test_commit_hash_url_w_custom_server(): """ Test the commit hash URL generation for a self-hosted Bitbucket server with prefix. ref: https://github.com/python-semantic-release/python-semantic-release/issues/1204 """ sha = "244f7e11bcb1e1ce097db61594056bc2a32189a0" expected_url = "{server}/{owner}/{repo}/commit/{sha}".format( server=f"https://{EXAMPLE_HVCS_DOMAIN}/projects/demo-foo", owner="foo", repo=EXAMPLE_REPO_NAME, sha=sha, ) with mock.patch.dict(os.environ, {}, clear=True): actual_url = Gitea( remote_url=f"https://{EXAMPLE_HVCS_DOMAIN}/projects/demo-foo/foo/{EXAMPLE_REPO_NAME}.git", hvcs_domain=f"https://{EXAMPLE_HVCS_DOMAIN}/projects/demo-foo", ).commit_hash_url(sha) assert expected_url == actual_url @pytest.mark.parametrize("issue_number", (666, "666", "#666")) def test_issue_url(default_gitea_client: Gitea, issue_number: int | str): expected_url = "{server}/{owner}/{repo}/issues/{issue_number}".format( server=default_gitea_client.hvcs_domain.url, owner=default_gitea_client.owner, repo=default_gitea_client.repo_name, issue_number=str(issue_number).lstrip("#"), ) assert expected_url == default_gitea_client.issue_url(issue_num=issue_number) @pytest.mark.parametrize("pr_number", (666, "666", "#666")) def test_pull_request_url(default_gitea_client: Gitea, pr_number: int | str): expected_url = "{server}/{owner}/{repo}/pulls/{pr_number}".format( server=default_gitea_client.hvcs_domain.url, owner=default_gitea_client.owner, repo=default_gitea_client.repo_name, pr_number=str(pr_number).lstrip("#"), ) actual_url = default_gitea_client.pull_request_url(pr_number=pr_number) assert expected_url == actual_url @pytest.mark.parametrize("release_id", (42, 666)) def test_asset_upload_url(default_gitea_client: Gitea, release_id: int): expected_url = "{server}/repos/{owner}/{repo}/releases/{release_id}/assets".format( server=default_gitea_client.api_url, 
owner=default_gitea_client.owner, repo=default_gitea_client.repo_name, release_id=release_id, ) actual_url = default_gitea_client.asset_upload_url(release_id=release_id) assert expected_url == actual_url ############ # Tests which need http response mocking ############ gitea_matcher = re.compile(rf"^https://{Gitea.DEFAULT_DOMAIN}") gitea_api_matcher = re.compile( rf"^https://{Gitea.DEFAULT_DOMAIN}{Gitea.DEFAULT_API_PATH}" ) @pytest.mark.parametrize("status_code", [201]) @pytest.mark.parametrize("mock_release_id", range(3)) @pytest.mark.parametrize("prerelease", (True, False)) def test_create_release_succeeds( default_gitea_client: Gitea, mock_release_id: int, prerelease: bool, status_code: int, ): tag = "v1.0.0" expected_num_requests = 1 expected_http_method = "POST" expected_request_url = "{api_url}/repos/{owner}/{repo_name}/releases".format( api_url=default_gitea_client.api_url, owner=default_gitea_client.owner, repo_name=default_gitea_client.repo_name, ) expected_request_body = { "tag_name": tag, "name": tag, "body": RELEASE_NOTES, "draft": False, "prerelease": prerelease, } with requests_mock.Mocker(session=default_gitea_client.session) as m: # mock the response m.register_uri( "POST", gitea_api_matcher, json={"id": mock_release_id}, status_code=status_code, ) # Execute method under test actual_rtn_val = default_gitea_client.create_release( tag, RELEASE_NOTES, prerelease ) # Evaluate (expected -> actual) assert mock_release_id == actual_rtn_val assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert expected_request_body == m.last_request.json() @pytest.mark.parametrize("status_code", (400, 409)) @pytest.mark.parametrize("mock_release_id", range(3)) @pytest.mark.parametrize("prerelease", (True, False)) def test_create_release_fails( default_gitea_client: Gitea, mock_release_id: int, prerelease: bool, status_code: int, ): tag = "v1.0.0" 
expected_num_requests = 1 expected_http_method = "POST" expected_request_url = "{api_url}/repos/{owner}/{repo_name}/releases".format( api_url=default_gitea_client.api_url, owner=default_gitea_client.owner, repo_name=default_gitea_client.repo_name, ) expected_request_body = { "tag_name": tag, "name": tag, "body": RELEASE_NOTES, "draft": False, "prerelease": prerelease, } with requests_mock.Mocker(session=default_gitea_client.session) as m: # mock the response m.register_uri( "POST", gitea_api_matcher, json={"id": mock_release_id}, status_code=status_code, ) # Execute method under test expecting an exeception to be raised with pytest.raises(HTTPError): default_gitea_client.create_release(tag, RELEASE_NOTES, prerelease) # Evaluate (expected -> actual) assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert expected_request_body == m.last_request.json() @pytest.mark.parametrize("token", (None, "super-token")) def test_should_create_release_using_token_or_netrc( default_gitea_client: Gitea, token: str | None, default_netrc_username: str, default_netrc_password: str, netrc_file: NetrcFileFn, clean_os_environment: dict[str, str], ): # Setup default_gitea_client.token = token default_gitea_client.session.auth = None if not token else TokenAuth(token) tag = "v1.0.0" expected_release_id = 1 expected_num_requests = 1 expected_http_method = "POST" expected_request_url = "{api_url}/repos/{owner}/{repo_name}/releases".format( api_url=default_gitea_client.api_url, owner=default_gitea_client.owner, repo_name=default_gitea_client.repo_name, ) expected_request_body = { "tag_name": tag, "name": tag, "body": RELEASE_NOTES, "draft": False, "prerelease": False, } expected_request_headers = set( ( {"Authorization": f"token {token}"} if token else { "Authorization": _basic_auth_str( default_netrc_username, default_netrc_password ) } ).items() ) # create netrc file # 
NOTE: write netrc file with DEFAULT_DOMAIN not DEFAULT_API_DOMAIN as can't # handle /api/v1 in file netrc = netrc_file(machine=default_gitea_client.DEFAULT_DOMAIN) mocked_os_environ = {**clean_os_environment, "NETRC": netrc.name} # Monkeypatch to create the Mocked environment with requests_mock.Mocker( session=default_gitea_client.session ) as m, mock.patch.dict(os.environ, mocked_os_environ, clear=True): # mock the response m.register_uri( "POST", gitea_api_matcher, json={"id": expected_release_id}, status_code=201 ) # Execute method under test ret_val = default_gitea_client.create_release(tag, RELEASE_NOTES) # Evaluate (expected -> actual) assert expected_release_id == ret_val assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert expected_request_body == m.last_request.json() # calculate the match between expected and actual headers # We are not looking for an exact match, just that the headers we must have exist shared_headers = expected_request_headers.intersection( set(m.last_request.headers.items()) ) assert expected_request_headers == shared_headers, str.join( os.linesep, [ "Actual headers are missing some of the expected headers", f"Matching: {shared_headers}", f"Missing: {expected_request_headers - shared_headers}", f"Extra: {set(m.last_request.headers.items()) - expected_request_headers}", ], ) def test_request_has_no_auth_header_if_no_token_or_netrc(): tag = "v1.0.0" expected_release_id = 1 expected_num_requests = 1 expected_http_method = "POST" with mock.patch.dict(os.environ, {}, clear=True): client = Gitea(remote_url=f"git@{Gitea.DEFAULT_DOMAIN}:something/somewhere.git") expected_request_url = "{api_url}/repos/{owner}/{repo_name}/releases".format( api_url=client.api_url, owner=client.owner, repo_name=client.repo_name, ) with requests_mock.Mocker(session=client.session) as m: # mock the response m.register_uri("POST", 
gitea_api_matcher, json={"id": 1}, status_code=201) # Execute method under test ret_val = client.create_release(tag, RELEASE_NOTES) # Evaluate (expected -> actual) assert expected_release_id == ret_val assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert "Authorization" not in m.last_request.headers @pytest.mark.parametrize( "resp_payload, status_code, expected_result", [ ({"id": 420}, 200, 420), ({}, 404, None), ], ) def test_get_release_id_by_tag( default_gitea_client: Gitea, resp_payload: dict[str, int], status_code: int, expected_result: int | None, ): # Setup tag = "v1.0.0" expected_num_requests = 1 expected_http_method = "GET" expected_request_url = ( "{api_url}/repos/{owner}/{repo_name}/releases/tags/{tag}".format( api_url=default_gitea_client.api_url, owner=default_gitea_client.owner, repo_name=default_gitea_client.repo_name, tag=tag, ) ) with requests_mock.Mocker(session=default_gitea_client.session) as m: # mock the response m.register_uri( "GET", gitea_api_matcher, json=resp_payload, status_code=status_code ) # Execute method under test rtn_val = default_gitea_client.get_release_id_by_tag(tag) # Evaluate (expected -> actual) assert expected_result == rtn_val assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url @pytest.mark.parametrize("status_code", [201]) @pytest.mark.parametrize("mock_release_id", range(3)) def test_edit_release_notes_succeeds( default_gitea_client: Gitea, status_code: int, mock_release_id: int, ): # Setup expected_num_requests = 1 expected_http_method = "PATCH" expected_request_url = ( "{api_url}/repos/{owner}/{repo_name}/releases/{release_id}".format( api_url=default_gitea_client.api_url, owner=default_gitea_client.owner, repo_name=default_gitea_client.repo_name, 
release_id=mock_release_id, ) ) expected_request_body = {"body": RELEASE_NOTES} with requests_mock.Mocker(session=default_gitea_client.session) as m: # mock the response m.register_uri( "PATCH", gitea_api_matcher, json={"id": mock_release_id}, status_code=status_code, ) # Execute method under test rtn_val = default_gitea_client.edit_release_notes( mock_release_id, RELEASE_NOTES ) # Evaluate (expected -> actual) assert mock_release_id == rtn_val assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert expected_request_body == m.last_request.json() @pytest.mark.parametrize("status_code", (400, 404, 429, 500, 503)) @pytest.mark.parametrize("mock_release_id", range(3)) def test_edit_release_notes_fails( default_gitea_client: Gitea, status_code: int, mock_release_id: int, ): # Setup expected_num_requests = 1 expected_http_method = "PATCH" expected_request_url = ( "{api_url}/repos/{owner}/{repo_name}/releases/{release_id}".format( api_url=default_gitea_client.api_url, owner=default_gitea_client.owner, repo_name=default_gitea_client.repo_name, release_id=mock_release_id, ) ) expected_request_body = {"body": RELEASE_NOTES} with requests_mock.Mocker(session=default_gitea_client.session) as m: # mock the response m.register_uri( "PATCH", gitea_api_matcher, json={"id": mock_release_id}, status_code=status_code, ) # Execute method under test expecting an exception to be raised with pytest.raises(HTTPError): default_gitea_client.edit_release_notes(mock_release_id, RELEASE_NOTES) assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert expected_request_body == m.last_request.json() # Note - mocking as the logic for the create/update of a release # is covered by testing above, no point re-testing. 
@pytest.mark.parametrize("mock_release_id", range(3)) @pytest.mark.parametrize("prerelease", (True, False)) def test_create_or_update_release_when_create_succeeds( default_gitea_client: Gitea, mock_release_id: int, prerelease: bool, ): tag = "v1.0.0" with mock.patch.object( default_gitea_client, default_gitea_client.create_release.__name__, return_value=mock_release_id, ) as mock_create_release, mock.patch.object( default_gitea_client, default_gitea_client.get_release_id_by_tag.__name__, return_value=mock_release_id, ) as mock_get_release_id_by_tag, mock.patch.object( default_gitea_client, default_gitea_client.edit_release_notes.__name__, return_value=mock_release_id, ) as mock_edit_release_notes: # Execute in mock environment result = default_gitea_client.create_or_update_release( tag, RELEASE_NOTES, prerelease ) # Evaluate (expected -> actual) assert mock_release_id == result mock_create_release.assert_called_once_with(tag, RELEASE_NOTES, prerelease) mock_get_release_id_by_tag.assert_not_called() mock_edit_release_notes.assert_not_called() @pytest.mark.parametrize("mock_release_id", range(3)) @pytest.mark.parametrize("prerelease", (True, False)) def test_create_or_update_release_when_create_fails_and_update_succeeds( default_gitea_client: Gitea, mock_release_id: int, prerelease: bool, ): tag = "v1.0.0" not_found = HTTPError("404 Not Found") not_found.response = Response() not_found.response.status_code = 404 with mock.patch.object( default_gitea_client, default_gitea_client.create_release.__name__, side_effect=not_found, ) as mock_create_release, mock.patch.object( default_gitea_client, default_gitea_client.get_release_id_by_tag.__name__, return_value=mock_release_id, ) as mock_get_release_id_by_tag, mock.patch.object( default_gitea_client, default_gitea_client.edit_release_notes.__name__, return_value=mock_release_id, ) as mock_edit_release_notes: # Execute in mock environment result = default_gitea_client.create_or_update_release( tag, RELEASE_NOTES, prerelease 
) # Evaluate (expected -> actual) assert mock_release_id == result mock_create_release.assert_called_once() mock_get_release_id_by_tag.assert_called_once_with(tag) mock_edit_release_notes.assert_called_once_with(mock_release_id, RELEASE_NOTES) @pytest.mark.parametrize("prerelease", (True, False)) def test_create_or_update_release_when_create_fails_and_no_release_for_tag( default_gitea_client: Gitea, prerelease: bool, ): tag = "v1.0.0" not_found = HTTPError("404 Not Found") not_found.response = Response() not_found.response.status_code = 404 with mock.patch.object( default_gitea_client, default_gitea_client.create_release.__name__, side_effect=not_found, ) as mock_create_release, mock.patch.object( default_gitea_client, default_gitea_client.get_release_id_by_tag.__name__, return_value=None, ) as mock_get_release_id_by_tag, mock.patch.object( default_gitea_client, default_gitea_client.edit_release_notes.__name__, return_value=None, ) as mock_edit_release_notes: # Execute in mock environment expecting an exception to be raised with pytest.raises(ValueError): default_gitea_client.create_or_update_release( tag, RELEASE_NOTES, prerelease ) mock_create_release.assert_called_once() mock_get_release_id_by_tag.assert_called_once_with(tag) mock_edit_release_notes.assert_not_called() @pytest.mark.parametrize("status_code", (200, 201)) @pytest.mark.parametrize("mock_release_id", range(3)) @pytest.mark.usefixtures(init_example_project.__name__) def test_upload_release_asset_succeeds( default_gitea_client: Gitea, example_changelog_md: Path, status_code: int, mock_release_id: int, ): # Setup urlparams = {"name": example_changelog_md.name} expected_num_requests = 1 expected_http_method = "POST" expected_request_url = "{url}?{params}".format( url=default_gitea_client.asset_upload_url(mock_release_id), params=urlencode(urlparams), ) expected_changelog = example_changelog_md.read_bytes() with requests_mock.Mocker(session=default_gitea_client.session) as m: m.register_uri( "POST", 
gitea_api_matcher, json={"status": "ok"}, status_code=status_code ) result = default_gitea_client.upload_release_asset( release_id=mock_release_id, file=example_changelog_md.resolve(), label="doesn't matter could be None", ) # Evaluate (expected -> actual) assert result is True assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert expected_changelog in m.last_request.body @pytest.mark.parametrize("status_code", (400, 500, 503)) @pytest.mark.parametrize("mock_release_id", range(3)) @pytest.mark.usefixtures(init_example_project.__name__) def test_upload_release_asset_fails( default_gitea_client: Gitea, example_changelog_md: Path, status_code: int, mock_release_id: int, ): with requests_mock.Mocker(session=default_gitea_client.session) as m: # mock the response m.register_uri( "POST", gitea_api_matcher, json={"status": "error"}, status_code=status_code ) # Execute method under test expecting an exception to be raised with pytest.raises(HTTPError): default_gitea_client.upload_release_asset( release_id=mock_release_id, file=example_changelog_md.resolve(), label="doesn't matter could be None", ) # Note - mocking as the logic for uploading an asset # is covered by testing above, no point re-testing. 
def test_upload_dists_when_release_id_not_found(default_gitea_client: Gitea): tag = "v1.0.0" path = "doesn't matter" expected_num_uploads = 0 # Set up mock environment with mock.patch.object( default_gitea_client, default_gitea_client.get_release_id_by_tag.__name__, return_value=None, ) as mock_get_release_id_by_tag, mock.patch.object( default_gitea_client, default_gitea_client.upload_release_asset.__name__ ) as mock_upload_release_asset: # Execute method under test result = default_gitea_client.upload_dists(tag, path) # Evaluate assert expected_num_uploads == result mock_get_release_id_by_tag.assert_called_once_with(tag=tag) mock_upload_release_asset.assert_not_called() @pytest.mark.parametrize( "files, glob_pattern, upload_statuses, expected_num_uploads", [ (["foo.zip", "bar.whl"], "*.zip", [True], 1), (["foo.whl", "foo.egg", "foo.tar.gz"], "foo.*", [True, True, True], 3), # What if not built? ([], "*", [], 0), # What if wrong directory/other stuff in output dir/subfolder? (["specialconfig.yaml", "something.whl", "desc.md"], "*.yaml", [True], 1), (["specialconfig.yaml", "something.whl", "desc.md"], "*.md", [True], 1), ], ) def test_upload_dists_when_release_id_found( default_gitea_client: Gitea, files: list[str], glob_pattern: str, upload_statuses: list[bool], expected_num_uploads: int, ): release_id = 420 tag = "doesn't matter" matching_files = fnmatch.filter(files, glob_pattern) expected_files_uploaded = [mock.call(release_id, fn) for fn in matching_files] # Skip check as the files don't exist in filesystem mocked_isfile = mock.patch.object(os.path, "isfile", return_value=True) mocked_globber = mock.patch.object(glob, "glob", return_value=matching_files) # Set up mock environment with mocked_globber, mocked_isfile, mock.patch.object( default_gitea_client, default_gitea_client.get_release_id_by_tag.__name__, return_value=release_id, ) as mock_get_release_id_by_tag, mock.patch.object( default_gitea_client, default_gitea_client.upload_release_asset.__name__, 
side_effect=upload_statuses, ) as mock_upload_release_asset: # Execute method under test num_uploads = default_gitea_client.upload_dists(tag, glob_pattern) # Evaluate (expected -> actual) assert expected_num_uploads == num_uploads mock_get_release_id_by_tag.assert_called_once_with(tag=tag) assert expected_files_uploaded == mock_upload_release_asset.call_args_list python-semantic-release-10.4.1/tests/unit/semantic_release/hvcs/test_github.py000066400000000000000000001112451506116242600276260ustar00rootroot00000000000000from __future__ import annotations import fnmatch import glob import os import re from typing import TYPE_CHECKING from unittest import mock from urllib.parse import urlencode import pytest import requests_mock from requests import HTTPError, Response, Session from requests.auth import _basic_auth_str from semantic_release.hvcs.github import Github from semantic_release.hvcs.token_auth import TokenAuth from tests.const import ( EXAMPLE_HVCS_DOMAIN, EXAMPLE_REPO_NAME, EXAMPLE_REPO_OWNER, RELEASE_NOTES, ) from tests.fixtures.example_project import init_example_project if TYPE_CHECKING: from pathlib import Path from typing import Generator from tests.conftest import NetrcFileFn @pytest.fixture def default_gh_client() -> Generator[Github, None, None]: remote_url = ( f"git@{Github.DEFAULT_DOMAIN}:{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git" ) with mock.patch.dict(os.environ, {}, clear=True): yield Github(remote_url=remote_url) @pytest.mark.parametrize( str.join( ", ", [ "patched_os_environ", "hvcs_domain", "hvcs_api_domain", "expected_hvcs_domain", "expected_hvcs_api_url", "insecure", ], ), [ ( # Default values (GitHub Enterprise Cloud) {}, None, None, "https://github.com", "https://api.github.com", False, ), ( # Explicitly set default values (GitHub Enterprise Cloud) {}, Github.DEFAULT_DOMAIN, Github.DEFAULT_API_DOMAIN, "https://github.com", "https://api.github.com", False, ), ( # Pull both locations from environment (GitHub Actions on Cloud) { 
"GITHUB_SERVER_URL": f"https://{Github.DEFAULT_DOMAIN}", "GITHUB_API_URL": f"https://{Github.DEFAULT_API_DOMAIN}", }, None, None, "https://github.com", "https://api.github.com", False, ), ( # Explicitly set custom values with full api path {}, EXAMPLE_HVCS_DOMAIN, f"{EXAMPLE_HVCS_DOMAIN}{Github.DEFAULT_API_PATH_ONPREM}", f"https://{EXAMPLE_HVCS_DOMAIN}", f"https://{EXAMPLE_HVCS_DOMAIN}{Github.DEFAULT_API_PATH_ONPREM}", False, ), ( # Explicitly defined api as subdomain # POSSIBLY WRONG ASSUMPTION of Api path for GitHub Enterprise Server (On Prem) {}, f"https://{EXAMPLE_HVCS_DOMAIN}", f"https://api.{EXAMPLE_HVCS_DOMAIN}", f"https://{EXAMPLE_HVCS_DOMAIN}", f"https://api.{EXAMPLE_HVCS_DOMAIN}{Github.DEFAULT_API_PATH_ONPREM}", False, ), ( # Custom domain with path prefix {}, "special.custom.server/vcs", None, "https://special.custom.server/vcs", "https://special.custom.server/vcs/api/v3", False, ), ( # Gather domain from environment & imply api domain from server domain {"GITHUB_SERVER_URL": "https://special.custom.server/"}, None, None, "https://special.custom.server", "https://special.custom.server/api/v3", False, ), ( # Pull both locations from environment (On-prem Actions Env) { "GITHUB_SERVER_URL": "https://special.custom.server/", "GITHUB_API_URL": "https://special.custom.server/api/v3", }, None, None, "https://special.custom.server", "https://special.custom.server/api/v3", False, ), ( # Ignore environment & use provided parameter value (ie from user config) # then infer api domain from the parameter value based on default GitHub configurations {"GITHUB_SERVER_URL": "https://special.custom.server/vcs/"}, f"https://{EXAMPLE_HVCS_DOMAIN}", None, f"https://{EXAMPLE_HVCS_DOMAIN}", f"https://{EXAMPLE_HVCS_DOMAIN}/api/v3", False, ), ( # Ignore environment & use provided parameter value (ie from user config) {"GITHUB_API_URL": "https://api.special.custom.server/"}, f"https://{EXAMPLE_HVCS_DOMAIN}", f"https://{EXAMPLE_HVCS_DOMAIN}/api/v3", 
f"https://{EXAMPLE_HVCS_DOMAIN}", f"https://{EXAMPLE_HVCS_DOMAIN}/api/v3", False, ), ( # Allow insecure http connections explicitly {}, f"http://{EXAMPLE_HVCS_DOMAIN}", f"http://{EXAMPLE_HVCS_DOMAIN}/api/v3", f"http://{EXAMPLE_HVCS_DOMAIN}", f"http://{EXAMPLE_HVCS_DOMAIN}/api/v3", True, ), ( # Allow insecure http connections explicitly & imply insecure api domain {}, f"http://{EXAMPLE_HVCS_DOMAIN}", None, f"http://{EXAMPLE_HVCS_DOMAIN}", f"http://{EXAMPLE_HVCS_DOMAIN}/api/v3", True, ), ( # Infer insecure connection from user configuration {}, EXAMPLE_HVCS_DOMAIN, EXAMPLE_HVCS_DOMAIN, f"http://{EXAMPLE_HVCS_DOMAIN}", f"http://{EXAMPLE_HVCS_DOMAIN}/api/v3", True, ), ( # Infer insecure connection from user configuration & imply insecure api domain {}, EXAMPLE_HVCS_DOMAIN, None, f"http://{EXAMPLE_HVCS_DOMAIN}", f"http://{EXAMPLE_HVCS_DOMAIN}/api/v3", True, ), ], ) @pytest.mark.parametrize( "remote_url", [ f"git@{Github.DEFAULT_DOMAIN}:{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git", f"https://{Github.DEFAULT_DOMAIN}/{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git", ], ) @pytest.mark.parametrize("token", ("abc123", None)) def test_github_client_init( patched_os_environ: dict[str, str], hvcs_domain: str | None, hvcs_api_domain: str | None, expected_hvcs_domain: str, expected_hvcs_api_url: str, remote_url: str, token: str | None, insecure: bool, ): with mock.patch.dict(os.environ, patched_os_environ, clear=True): client = Github( remote_url=remote_url, hvcs_domain=hvcs_domain, hvcs_api_domain=hvcs_api_domain, token=token, allow_insecure=insecure, ) # Evaluate (expected -> actual) assert expected_hvcs_domain == str(client.hvcs_domain) assert expected_hvcs_api_url == str(client.api_url) assert token == client.token assert remote_url == client._remote_url assert hasattr(client, "session") assert isinstance(getattr(client, "session", None), Session) @pytest.mark.parametrize( "hvcs_domain, hvcs_api_domain, insecure", [ # Bad base domain schemes (f"ftp://{EXAMPLE_HVCS_DOMAIN}", 
None, False), (f"ftp://{EXAMPLE_HVCS_DOMAIN}", None, True), # Unallowed insecure connections when base domain is insecure (f"http://{EXAMPLE_HVCS_DOMAIN}", None, False), # Bad API domain schemes (None, f"ftp://api.{EXAMPLE_HVCS_DOMAIN}", False), (None, f"ftp://api.{EXAMPLE_HVCS_DOMAIN}", True), # Unallowed insecure connections when api domain is insecure (None, f"http://{EXAMPLE_HVCS_DOMAIN}", False), ], ) def test_github_client_init_with_invalid_scheme( hvcs_domain: str | None, hvcs_api_domain: str | None, insecure: bool, ): with pytest.raises(ValueError), mock.patch.dict(os.environ, {}, clear=True): Github( remote_url=f"https://{EXAMPLE_HVCS_DOMAIN}/{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git", hvcs_domain=hvcs_domain, hvcs_api_domain=hvcs_api_domain, allow_insecure=insecure, ) @pytest.mark.parametrize( "patched_os_environ, expected_owner, expected_name", [ ({}, None, None), ({"GITHUB_REPOSITORY": "path/to/repo/foo"}, "path/to/repo", "foo"), ], ) def test_github_get_repository_owner_and_name( default_gh_client: Github, patched_os_environ: dict[str, str], expected_owner: str, expected_name: str, ): # expected results should be a tuple[namespace, repo_name] # when None, the default values are used which matches default_gh_client's setup expected_result = ( expected_owner or EXAMPLE_REPO_OWNER, expected_name or EXAMPLE_REPO_NAME, ) with mock.patch.dict(os.environ, patched_os_environ, clear=True): # Execute in mocked environment result = default_gh_client._get_repository_owner_and_name() # Evaluate (expected -> actual) assert expected_result == result def test_compare_url(default_gh_client: Github): # Setup start_rev = "revA" end_rev = "revB" expected_url = "{server}/{owner}/{repo}/compare/{from_rev}...{to_rev}".format( server=default_gh_client.hvcs_domain, owner=default_gh_client.owner, repo=default_gh_client.repo_name, from_rev=start_rev, to_rev=end_rev, ) # Execute method under test actual_url = default_gh_client.compare_url(from_rev=start_rev, to_rev=end_rev) # 
Evaluate (expected -> actual) assert expected_url == actual_url @pytest.mark.parametrize( "patched_os_environ, use_token, token, remote_url, expected_auth_url", [ ( {"GITHUB_ACTOR": "foo"}, False, "", f"git@{Github.DEFAULT_DOMAIN}:custom/example.git", f"git@{Github.DEFAULT_DOMAIN}:custom/example.git", ), ( {"GITHUB_ACTOR": "foo"}, True, "", f"git@{Github.DEFAULT_DOMAIN}:custom/example.git", f"git@{Github.DEFAULT_DOMAIN}:custom/example.git", ), ( {}, False, "aabbcc", f"git@{Github.DEFAULT_DOMAIN}:custom/example.git", f"git@{Github.DEFAULT_DOMAIN}:custom/example.git", ), ( {}, True, "aabbcc", f"git@{Github.DEFAULT_DOMAIN}:custom/example.git", f"https://aabbcc@{Github.DEFAULT_DOMAIN}/custom/example.git", ), ( {"GITHUB_ACTOR": "foo"}, False, "aabbcc", f"git@{Github.DEFAULT_DOMAIN}:custom/example.git", f"git@{Github.DEFAULT_DOMAIN}:custom/example.git", ), ( {"GITHUB_ACTOR": "foo"}, True, "aabbcc", f"git@{Github.DEFAULT_DOMAIN}:custom/example.git", f"https://foo:aabbcc@{Github.DEFAULT_DOMAIN}/custom/example.git", ), ], ) def test_remote_url( default_gh_client: Github, patched_os_environ: dict[str, str], use_token: bool, token: str, remote_url: str, expected_auth_url: str, ): with mock.patch.dict(os.environ, patched_os_environ, clear=True): default_gh_client._remote_url = remote_url default_gh_client.token = token # Execute method under test & Evaluate (expected -> actual) assert expected_auth_url == default_gh_client.remote_url(use_token=use_token) def test_commit_hash_url(default_gh_client: Github): sha = "hashashash" expected_url = "{server}/{owner}/{repo}/commit/{sha}".format( server=default_gh_client.hvcs_domain.url, owner=default_gh_client.owner, repo=default_gh_client.repo_name, sha=sha, ) assert expected_url == default_gh_client.commit_hash_url(sha) def test_commit_hash_url_w_custom_server(): """ Test the commit hash URL generation for a self-hosted Bitbucket server with prefix. 
ref: https://github.com/python-semantic-release/python-semantic-release/issues/1204 """ sha = "244f7e11bcb1e1ce097db61594056bc2a32189a0" expected_url = "{server}/{owner}/{repo}/commit/{sha}".format( server=f"https://{EXAMPLE_HVCS_DOMAIN}/projects/demo-foo", owner="foo", repo=EXAMPLE_REPO_NAME, sha=sha, ) with mock.patch.dict(os.environ, {}, clear=True): actual_url = Github( remote_url=f"https://{EXAMPLE_HVCS_DOMAIN}/projects/demo-foo/foo/{EXAMPLE_REPO_NAME}.git", hvcs_domain=f"https://{EXAMPLE_HVCS_DOMAIN}/projects/demo-foo", ).commit_hash_url(sha) assert expected_url == actual_url @pytest.mark.parametrize("issue_number", (666, "666", "#666")) def test_issue_url(default_gh_client: Github, issue_number: str | int): expected_url = "{server}/{owner}/{repo}/issues/{issue_num}".format( server=default_gh_client.hvcs_domain.url, owner=default_gh_client.owner, repo=default_gh_client.repo_name, issue_num=str(issue_number).lstrip("#"), ) assert expected_url == default_gh_client.issue_url(issue_num=issue_number) @pytest.mark.parametrize("pr_number", (666, "666", "#666")) def test_pull_request_url(default_gh_client: Github, pr_number: int | str): expected_url = "{server}/{owner}/{repo}/pull/{pr_number}".format( server=default_gh_client.hvcs_domain, owner=default_gh_client.owner, repo=default_gh_client.repo_name, pr_number=str(pr_number).lstrip("#"), ) actual_url = default_gh_client.pull_request_url(pr_number=pr_number) assert expected_url == actual_url ############ # Tests which need http response mocking ############ github_upload_url = f"https://uploads.{Github.DEFAULT_DOMAIN}" github_matcher = re.compile(rf"^https://{Github.DEFAULT_DOMAIN}") github_api_matcher = re.compile(rf"^https://{Github.DEFAULT_API_DOMAIN}") github_upload_matcher = re.compile(rf"^{github_upload_url}") @pytest.mark.parametrize("status_code", (200, 201)) @pytest.mark.parametrize("mock_release_id", range(3)) @pytest.mark.parametrize("prerelease", (True, False)) def test_create_release_succeeds( 
default_gh_client: Github, mock_release_id: int, prerelease: bool, status_code: int, ): tag = "v1.0.0" expected_num_requests = 1 expected_http_method = "POST" expected_request_url = "{api_url}/repos/{owner}/{repo_name}/releases".format( api_url=default_gh_client.api_url, owner=default_gh_client.owner, repo_name=default_gh_client.repo_name, ) expected_request_body = { "tag_name": tag, "name": tag, "body": RELEASE_NOTES, "draft": False, "prerelease": prerelease, } with requests_mock.Mocker(session=default_gh_client.session) as m: # mock the response m.register_uri( "POST", github_api_matcher, json={"id": mock_release_id}, status_code=status_code, ) # Execute method under test actual_rtn_val = default_gh_client.create_release( tag, RELEASE_NOTES, prerelease ) # Evaluate (expected -> actual) assert mock_release_id == actual_rtn_val assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert expected_request_body == m.last_request.json() @pytest.mark.parametrize("status_code", (400, 404, 429, 500, 503)) @pytest.mark.parametrize("mock_release_id", range(3)) @pytest.mark.parametrize("prerelease", (True, False)) def test_create_release_fails( default_gh_client: Github, mock_release_id: int, prerelease: bool, status_code: int, ): tag = "v1.0.0" expected_num_requests = 1 expected_http_method = "POST" expected_request_url = "{api_url}/repos/{owner}/{repo_name}/releases".format( api_url=default_gh_client.api_url, owner=default_gh_client.owner, repo_name=default_gh_client.repo_name, ) expected_request_body = { "tag_name": tag, "name": tag, "body": RELEASE_NOTES, "draft": False, "prerelease": prerelease, } with requests_mock.Mocker(session=default_gh_client.session) as m: # mock the response m.register_uri( "POST", github_api_matcher, json={"id": mock_release_id}, status_code=status_code, ) # Execute method under test expecting an exeception to be raised with 
pytest.raises(HTTPError): default_gh_client.create_release(tag, RELEASE_NOTES, prerelease) # Evaluate (expected -> actual) assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert expected_request_body == m.last_request.json() @pytest.mark.parametrize("token", (None, "super-token")) def test_should_create_release_using_token_or_netrc( default_gh_client: Github, token: str | None, default_netrc_username: str, default_netrc_password: str, netrc_file: NetrcFileFn, clean_os_environment: dict[str, str], ): # Setup default_gh_client.token = token default_gh_client.session.auth = None if not token else TokenAuth(token) tag = "v1.0.0" expected_release_id = 1 expected_num_requests = 1 expected_http_method = "POST" expected_request_url = "{api_url}/repos/{owner}/{repo_name}/releases".format( api_url=default_gh_client.api_url, owner=default_gh_client.owner, repo_name=default_gh_client.repo_name, ) expected_request_body = { "tag_name": tag, "name": tag, "body": RELEASE_NOTES, "draft": False, "prerelease": False, } expected_request_headers = set( ( {"Authorization": f"token {token}"} if token else { "Authorization": _basic_auth_str( default_netrc_username, default_netrc_password ) } ).items() ) # create netrc file netrc = netrc_file(machine=default_gh_client.DEFAULT_API_DOMAIN) mocked_os_environ = {**clean_os_environment, "NETRC": netrc.name} # Monkeypatch to create the Mocked environment with requests_mock.Mocker(session=default_gh_client.session) as m, mock.patch.dict( os.environ, mocked_os_environ, clear=True ): # mock the response m.register_uri( "POST", github_api_matcher, json={"id": expected_release_id}, status_code=201, ) # Execute method under test ret_val = default_gh_client.create_release(tag, RELEASE_NOTES) # Evaluate (expected -> actual) assert expected_release_id == ret_val assert m.called assert expected_num_requests == 
len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert expected_request_body == m.last_request.json() # calculate the match between expected and actual headers # We are not looking for an exact match, just that the headers we must have exist shared_headers = expected_request_headers.intersection( set(m.last_request.headers.items()) ) assert expected_request_headers == shared_headers, str.join( os.linesep, [ "Actual headers are missing some of the expected headers", f"Matching: {shared_headers}", f"Missing: {expected_request_headers - shared_headers}", f"Extra: {set(m.last_request.headers.items()) - expected_request_headers}", ], ) def test_request_has_no_auth_header_if_no_token_or_netrc(): tag = "v1.0.0" expected_release_id = 1 expected_num_requests = 1 expected_http_method = "POST" with mock.patch.dict(os.environ, {}, clear=True): client = Github( remote_url=f"git@{Github.DEFAULT_DOMAIN}:something/somewhere.git" ) expected_request_url = "{api_url}/repos/{owner}/{repo_name}/releases".format( api_url=client.api_url, owner=client.owner, repo_name=client.repo_name, ) with requests_mock.Mocker(session=client.session) as m: # mock the response m.register_uri("POST", github_api_matcher, json={"id": 1}, status_code=201) # Execute method under test rtn_val = client.create_release(tag, RELEASE_NOTES) # Evaluate (expected -> actual) assert expected_release_id == rtn_val assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert "Authorization" not in m.last_request.headers @pytest.mark.parametrize("status_code", [201]) @pytest.mark.parametrize("mock_release_id", range(3)) def test_edit_release_notes_succeeds( default_gh_client: Github, status_code: int, mock_release_id: int, ): # Setup expected_num_requests = 1 expected_http_method = "POST" expected_request_url = ( 
"{api_url}/repos/{owner}/{repo_name}/releases/{release_id}".format( api_url=default_gh_client.api_url, owner=default_gh_client.owner, repo_name=default_gh_client.repo_name, release_id=mock_release_id, ) ) expected_request_body = {"body": RELEASE_NOTES} with requests_mock.Mocker(session=default_gh_client.session) as m: # mock the response m.register_uri( "POST", github_api_matcher, json={"id": mock_release_id}, status_code=status_code, ) # Execute method under test rtn_val = default_gh_client.edit_release_notes(mock_release_id, RELEASE_NOTES) # Evaluate (expected -> actual) assert mock_release_id == rtn_val assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert expected_request_body == m.last_request.json() @pytest.mark.parametrize("status_code", (400, 404, 429, 500, 503)) @pytest.mark.parametrize("mock_release_id", range(3)) def test_edit_release_notes_fails( default_gh_client: Github, status_code: int, mock_release_id: int ): # Setup expected_num_requests = 1 expected_http_method = "POST" expected_request_url = ( "{api_url}/repos/{owner}/{repo_name}/releases/{release_id}".format( api_url=default_gh_client.api_url, owner=default_gh_client.owner, repo_name=default_gh_client.repo_name, release_id=mock_release_id, ) ) expected_request_body = {"body": RELEASE_NOTES} with requests_mock.Mocker(session=default_gh_client.session) as m: # mock the response m.register_uri( "POST", github_api_matcher, json={"id": mock_release_id}, status_code=status_code, ) # Execute method under test expecting an exception to be raised with pytest.raises(HTTPError): default_gh_client.edit_release_notes(mock_release_id, RELEASE_NOTES) # Evaluate (expected -> actual) assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert expected_request_body == 
m.last_request.json() @pytest.mark.parametrize( "resp_payload, status_code, expected_result", [ ({"id": 420, "status": "success"}, 200, 420), ({"error": "not found"}, 404, None), ({"error": "too many requests"}, 429, None), ({"error": "internal error"}, 500, None), ({"error": "temporarily unavailable"}, 503, None), ], ) def test_get_release_id_by_tag( default_gh_client: Github, resp_payload: dict[str, int], status_code: int, expected_result: int | None, ): # Setup tag = "v1.0.0" expected_num_requests = 1 expected_http_method = "GET" expected_request_url = ( "{api_url}/repos/{owner}/{repo_name}/releases/tags/{tag}".format( api_url=default_gh_client.api_url, owner=default_gh_client.owner, repo_name=default_gh_client.repo_name, tag=tag, ) ) with requests_mock.Mocker(session=default_gh_client.session) as m: # mock the response m.register_uri( "GET", github_api_matcher, json=resp_payload, status_code=status_code ) # Execute method under test rtn_val = default_gh_client.get_release_id_by_tag(tag) # Evaluate (expected -> actual) assert expected_result == rtn_val assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url # Note - mocking as the logic for the create/update of a release # is covered by testing above, no point re-testing. 
@pytest.mark.parametrize("mock_release_id", range(3)) @pytest.mark.parametrize("prerelease", (True, False)) def test_create_or_update_release_when_create_succeeds( default_gh_client: Github, mock_release_id: int, prerelease: bool, ): tag = "v1.0.0" with mock.patch.object( default_gh_client, default_gh_client.create_release.__name__, return_value=mock_release_id, ) as mock_create_release, mock.patch.object( default_gh_client, default_gh_client.get_release_id_by_tag.__name__, return_value=mock_release_id, ) as mock_get_release_id_by_tag, mock.patch.object( default_gh_client, default_gh_client.edit_release_notes.__name__, return_value=mock_release_id, ) as mock_edit_release_notes: # Execute in mock environment result = default_gh_client.create_or_update_release( tag, RELEASE_NOTES, prerelease ) # Evaluate (expected -> actual) assert mock_release_id == result mock_create_release.assert_called_once_with(tag, RELEASE_NOTES, prerelease) mock_get_release_id_by_tag.assert_not_called() mock_edit_release_notes.assert_not_called() @pytest.mark.parametrize("mock_release_id", range(3)) @pytest.mark.parametrize("prerelease", (True, False)) def test_create_or_update_release_when_create_fails_and_update_succeeds( default_gh_client: Github, mock_release_id: int, prerelease: bool, ): tag = "v1.0.0" not_found = HTTPError("404 Not Found") not_found.response = Response() not_found.response.status_code = 404 with mock.patch.object( default_gh_client, default_gh_client.create_release.__name__, side_effect=not_found, ) as mock_create_release, mock.patch.object( default_gh_client, default_gh_client.get_release_id_by_tag.__name__, return_value=mock_release_id, ) as mock_get_release_id_by_tag, mock.patch.object( default_gh_client, default_gh_client.edit_release_notes.__name__, return_value=mock_release_id, ) as mock_edit_release_notes: # Execute in mock environment result = default_gh_client.create_or_update_release( tag, RELEASE_NOTES, prerelease ) # Evaluate (expected -> actual) assert 
mock_release_id == result mock_create_release.assert_called_once() mock_get_release_id_by_tag.assert_called_once_with(tag) mock_edit_release_notes.assert_called_once_with(mock_release_id, RELEASE_NOTES) @pytest.mark.parametrize("prerelease", (True, False)) def test_create_or_update_release_when_create_fails_and_no_release_for_tag( default_gh_client: Github, prerelease: bool, ): tag = "v1.0.0" not_found = HTTPError("404 Not Found") not_found.response = Response() not_found.response.status_code = 404 with mock.patch.object( default_gh_client, default_gh_client.create_release.__name__, side_effect=not_found, ) as mock_create_release, mock.patch.object( default_gh_client, default_gh_client.get_release_id_by_tag.__name__, return_value=None, ) as mock_get_release_id_by_tag, mock.patch.object( default_gh_client, default_gh_client.edit_release_notes.__name__, return_value=None, ) as mock_edit_release_notes: # Execute in mock environment expecting an exception to be raised with pytest.raises(ValueError): default_gh_client.create_or_update_release(tag, RELEASE_NOTES, prerelease) mock_create_release.assert_called_once() mock_get_release_id_by_tag.assert_called_once_with(tag) mock_edit_release_notes.assert_not_called() def test_asset_upload_url(default_gh_client: Github): release_id = 1 expected_num_requests = 1 expected_http_method = "GET" expected_asset_upload_request_url = ( "{api_url}/repos/{owner}/{repo}/releases/{release_id}".format( api_url=default_gh_client.api_url, owner=default_gh_client.owner, repo=default_gh_client.repo_name, release_id=release_id, ) ) mocked_upload_url = ( "{upload_domain}/repos/{owner}/{repo}/releases/{release_id}/assets".format( upload_domain=github_upload_url, owner=default_gh_client.owner, repo=default_gh_client.repo_name, release_id=release_id, ) ) # '{?name,label}' are added by github.com at least, maybe custom too # https://docs.github.com/en/rest/releases/releases?apiVersion=2022-11-28#get-a-release resp_payload = { "upload_url": 
mocked_upload_url + "{?name,label}", "status": "success", } with requests_mock.Mocker(session=default_gh_client.session) as m: # mock the response m.register_uri("GET", github_api_matcher, json=resp_payload, status_code=200) # Execute method under test result = default_gh_client.asset_upload_url(release_id) # Evaluate (expected -> actual) assert mocked_upload_url == result assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_asset_upload_request_url == m.last_request.url @pytest.mark.parametrize("status_code", (200, 201)) @pytest.mark.parametrize("mock_release_id", range(3)) @pytest.mark.usefixtures(init_example_project.__name__) def test_upload_release_asset_succeeds( default_gh_client: Github, example_changelog_md: Path, status_code: int, mock_release_id: int, ): # Setup label = "abc123" urlparams = {"name": example_changelog_md.name, "label": label} release_upload_url = ( "{upload_domain}/repos/{owner}/{repo}/releases/{release_id}/assets".format( upload_domain=github_upload_url, owner=default_gh_client.owner, repo=default_gh_client.repo_name, release_id=mock_release_id, ) ) expected_num_requests = 2 expected_retrieve_upload_url_method = "GET" expected_upload_http_method = "POST" expected_upload_url = "{url}?{params}".format( url=release_upload_url, params=urlencode(urlparams), ) expected_changelog = example_changelog_md.read_bytes() json_get_up_url = { "status": "ok", "upload_url": release_upload_url + "{?name,label}", } with requests_mock.Mocker(session=default_gh_client.session) as m: # mock the responses m.register_uri( "POST", github_upload_matcher, json={"status": "ok"}, status_code=status_code, ) m.register_uri( "GET", github_api_matcher, json=json_get_up_url, status_code=status_code ) # Execute method under test result = default_gh_client.upload_release_asset( release_id=mock_release_id, file=example_changelog_md.resolve(), label=label, ) # Evaluate (expected -> actual) 
assert result is True assert m.called assert expected_num_requests == len(m.request_history) get_req, post_req = m.request_history assert expected_retrieve_upload_url_method == get_req.method assert expected_upload_http_method == post_req.method assert expected_upload_url == post_req.url assert expected_changelog == post_req.body @pytest.mark.parametrize("status_code", (400, 404, 429, 500, 503)) @pytest.mark.parametrize("mock_release_id", range(3)) @pytest.mark.usefixtures(init_example_project.__name__) def test_upload_release_asset_fails( default_gh_client: Github, example_changelog_md: Path, status_code: int, mock_release_id: int, ): # Setup label = "abc123" upload_url = "{up_url}/repos/{owner}/{repo_name}/releases/{release_id}".format( up_url=github_upload_url, owner=default_gh_client.owner, repo_name=default_gh_client.repo_name, release_id=mock_release_id, ) json_get_up_url = { "status": "ok", "upload_url": upload_url, } with requests_mock.Mocker(session=default_gh_client.session) as m: # mock the responses m.register_uri( "POST", github_upload_matcher, json={"message": "error"}, status_code=status_code, ) m.register_uri("GET", github_api_matcher, json=json_get_up_url, status_code=200) # Execute method under test expecting an exception to be raised with pytest.raises(HTTPError): default_gh_client.upload_release_asset( release_id=mock_release_id, file=example_changelog_md.resolve(), label=label, ) # Note - mocking as the logic for uploading an asset # is covered by testing above, no point re-testing. 
def test_upload_dists_when_release_id_not_found(default_gh_client): tag = "v1.0.0" path = "doesn't matter" expected_num_uploads = 0 # Set up mock environment with mock.patch.object( default_gh_client, default_gh_client.get_release_id_by_tag.__name__, return_value=None, ) as mock_get_release_id_by_tag, mock.patch.object( default_gh_client, default_gh_client.upload_release_asset.__name__ ) as mock_upload_release_asset: # Execute method under test result = default_gh_client.upload_dists(tag, path) # Evaluate assert expected_num_uploads == result mock_get_release_id_by_tag.assert_called_once_with(tag=tag) mock_upload_release_asset.assert_not_called() @pytest.mark.parametrize( "files, glob_pattern, upload_statuses, expected_num_uploads", [ (["foo.zip", "bar.whl"], "*.zip", [True], 1), (["foo.whl", "foo.egg", "foo.tar.gz"], "foo.*", [True, True, True], 3), # What if not built? ([], "*", [], 0), # What if wrong directory/other stuff in output dir/subfolder? (["specialconfig.yaml", "something.whl", "desc.md"], "*.yaml", [True], 1), (["specialconfig.yaml", "something.whl", "desc.md"], "*.md", [True], 1), ], ) def test_upload_dists_when_release_id_found( default_gh_client: Github, files: list[str], glob_pattern: str, upload_statuses: list[bool], expected_num_uploads: int, ): release_id = 420 tag = "doesn't matter" matching_files = fnmatch.filter(files, glob_pattern) expected_files_uploaded = [mock.call(release_id, fn) for fn in matching_files] # Skip check as the files don't exist in filesystem mocked_isfile = mock.patch.object(os.path, "isfile", return_value=True) mocked_globber = mock.patch.object(glob, "glob", return_value=matching_files) # Set up mock environment with mocked_globber, mocked_isfile, mock.patch.object( default_gh_client, default_gh_client.get_release_id_by_tag.__name__, return_value=release_id, ) as mock_get_release_id_by_tag, mock.patch.object( default_gh_client, default_gh_client.upload_release_asset.__name__, side_effect=upload_statuses, ) as 
mock_upload_release_asset: # Execute method under test num_uploads = default_gh_client.upload_dists(tag, glob_pattern) # Evaluate (expected -> actual) assert expected_num_uploads == num_uploads mock_get_release_id_by_tag.assert_called_once_with(tag=tag) assert expected_files_uploaded == mock_upload_release_asset.call_args_list python-semantic-release-10.4.1/tests/unit/semantic_release/hvcs/test_gitlab.py000066400000000000000000000416741506116242600276160ustar00rootroot00000000000000from __future__ import annotations import os from typing import TYPE_CHECKING from unittest import mock import gitlab import gitlab.exceptions import gitlab.mixins import gitlab.v4.objects import pytest from semantic_release.hvcs.gitlab import Gitlab from tests.const import ( EXAMPLE_HVCS_DOMAIN, EXAMPLE_REPO_NAME, EXAMPLE_REPO_OWNER, RELEASE_NOTES, ) if TYPE_CHECKING: from typing import Generator # Note: there's nothing special about the value of these variables, # they're just constants for easier consistency with the faked objects A_GOOD_TAG = "v1.2.3" A_BAD_TAG = "v2.1.1-rc.1" A_LOCKED_TAG = "v0.9.0" A_MISSING_TAG = "v1.0.0+missing" # But note this is the only ref we're making a "fake" commit for, so # tests which need to query the remote for "a" ref, the exact sha for # which doesn't matter, all use this constant REF = "hashashash" @pytest.fixture def default_gl_project(example_git_https_url: str): return gitlab.Gitlab(url=example_git_https_url).projects.get( f"{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}", lazy=True ) @pytest.fixture def default_gl_client( example_git_https_url: str, default_gl_project: gitlab.v4.objects.Project, ) -> Generator[Gitlab, None, None]: gitlab_client = Gitlab(remote_url=example_git_https_url) # make sure that when project tries to get the project instance, we return the mock # that we control project_get_mock = mock.patch.object( gitlab_client._client.projects, gitlab_client._client.projects.get.__name__, return_value=default_gl_project, ) env_mock = 
mock.patch.dict(os.environ, {}, clear=True) with project_get_mock, env_mock: yield gitlab_client @pytest.mark.parametrize( "patched_os_environ, hvcs_domain, expected_hvcs_domain, insecure", # NOTE: GitLab does not have a different api domain [ # Default values ({}, None, f"https://{Gitlab.DEFAULT_DOMAIN}", False), ( # Gather domain from environment {"CI_SERVER_URL": "https://special.custom.server/"}, None, "https://special.custom.server", False, ), ( # Custom domain with path prefix (derives from environment) {"CI_SERVER_URL": "https://special.custom.server/vcs/"}, None, "https://special.custom.server/vcs", False, ), ( # Ignore environment & use provided parameter value (ie from user config) { "CI_SERVER_URL": "https://special.custom.server/", "CI_API_V4_URL": "https://special.custom.server/api/v3", }, f"https://{EXAMPLE_HVCS_DOMAIN}", f"https://{EXAMPLE_HVCS_DOMAIN}", False, ), ( # Allow insecure http connections explicitly {}, f"http://{EXAMPLE_HVCS_DOMAIN}", f"http://{EXAMPLE_HVCS_DOMAIN}", True, ), ( # Infer insecure connection from user configuration {}, EXAMPLE_HVCS_DOMAIN, f"http://{EXAMPLE_HVCS_DOMAIN}", True, ), ], ) @pytest.mark.parametrize( "remote_url", [ f"git@{Gitlab.DEFAULT_DOMAIN}:{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git", f"https://{Gitlab.DEFAULT_DOMAIN}/{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git", ], ) @pytest.mark.parametrize("token", ("abc123", None)) def test_gitlab_client_init( patched_os_environ: dict[str, str], hvcs_domain: str | None, expected_hvcs_domain: str, remote_url: str, token: str | None, insecure: bool, ): with mock.patch.dict(os.environ, patched_os_environ, clear=True): client = Gitlab( remote_url=remote_url, hvcs_domain=hvcs_domain, token=token, allow_insecure=insecure, ) # Evaluate (expected -> actual) assert expected_hvcs_domain == client.hvcs_domain.url assert token == client.token assert remote_url == client._remote_url @pytest.mark.parametrize( "hvcs_domain, insecure", [ (f"ftp://{EXAMPLE_HVCS_DOMAIN}", False), 
(f"ftp://{EXAMPLE_HVCS_DOMAIN}", True), (f"http://{EXAMPLE_HVCS_DOMAIN}", False), ], ) def test_gitlab_client_init_with_invalid_scheme( hvcs_domain: str, insecure: bool, ): with pytest.raises(ValueError), mock.patch.dict(os.environ, {}, clear=True): Gitlab( remote_url=f"https://{EXAMPLE_HVCS_DOMAIN}/{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git", hvcs_domain=hvcs_domain, allow_insecure=insecure, ) @pytest.mark.parametrize( "patched_os_environ, expected_owner, expected_name", [ ({}, None, None), ( {"CI_PROJECT_NAMESPACE": "path/to/repo", "CI_PROJECT_NAME": "foo"}, "path/to/repo", "foo", ), ], ) def test_gitlab_get_repository_owner_and_name( default_gl_client: Gitlab, example_git_https_url: str, patched_os_environ: dict[str, str], expected_owner: str | None, expected_name: str | None, ): # expected results should be a tuple[namespace, repo_name] and if both are None, # then the default value from GitLab class should be used expected_result = (expected_owner, expected_name) if expected_owner is None and expected_name is None: expected_result = super( Gitlab, default_gl_client )._get_repository_owner_and_name() with mock.patch.dict(os.environ, patched_os_environ, clear=True): # Execute in mocked environment result = Gitlab( remote_url=example_git_https_url, )._get_repository_owner_and_name() # Evaluate (expected -> actual) assert expected_result == result @pytest.mark.parametrize( "use_token, token, remote_url, expected_auth_url", [ ( False, "", f"git@{Gitlab.DEFAULT_DOMAIN}:custom/example.git", f"git@{Gitlab.DEFAULT_DOMAIN}:custom/example.git", ), ( True, "", f"git@{Gitlab.DEFAULT_DOMAIN}:custom/example.git", f"git@{Gitlab.DEFAULT_DOMAIN}:custom/example.git", ), ( False, "aabbcc", f"git@{Gitlab.DEFAULT_DOMAIN}:custom/example.git", f"git@{Gitlab.DEFAULT_DOMAIN}:custom/example.git", ), ( True, "aabbcc", f"git@{Gitlab.DEFAULT_DOMAIN}:custom/example.git", f"https://gitlab-ci-token:aabbcc@{Gitlab.DEFAULT_DOMAIN}/custom/example.git", ), ], ) def test_remote_url( use_token: 
bool, token: str, remote_url: str, expected_auth_url: str, ): with mock.patch.dict(os.environ, {}, clear=True): gl_client = Gitlab(remote_url=remote_url, token=token) assert expected_auth_url == gl_client.remote_url(use_token=use_token) def test_compare_url(default_gl_client: Gitlab): start_rev = "revA" end_rev = "revB" expected_url = "{server}/{owner}/{repo}/-/compare/{from_rev}...{to_rev}".format( server=default_gl_client.hvcs_domain.url, owner=default_gl_client.owner, repo=default_gl_client.repo_name, from_rev=start_rev, to_rev=end_rev, ) actual_url = default_gl_client.compare_url(from_rev=start_rev, to_rev=end_rev) assert expected_url == actual_url def test_commit_hash_url(default_gl_client: Gitlab): expected_url = "{server}/{owner}/{repo}/-/commit/{sha}".format( server=default_gl_client.hvcs_domain.url, owner=default_gl_client.owner, repo=default_gl_client.repo_name, sha=REF, ) assert expected_url == default_gl_client.commit_hash_url(REF) def test_commit_hash_url_w_custom_server(): """ Test the commit hash URL generation for a self-hosted Bitbucket server with prefix. 
ref: https://github.com/python-semantic-release/python-semantic-release/issues/1204 """ sha = "244f7e11bcb1e1ce097db61594056bc2a32189a0" expected_url = "{server}/{owner}/{repo}/-/commit/{sha}".format( server=f"https://{EXAMPLE_HVCS_DOMAIN}/projects/demo-foo", owner="foo", repo=EXAMPLE_REPO_NAME, sha=sha, ) with mock.patch.dict(os.environ, {}, clear=True): actual_url = Gitlab( remote_url=f"https://{EXAMPLE_HVCS_DOMAIN}/projects/demo-foo/foo/{EXAMPLE_REPO_NAME}.git", hvcs_domain=f"https://{EXAMPLE_HVCS_DOMAIN}/projects/demo-foo", ).commit_hash_url(sha) assert expected_url == actual_url @pytest.mark.parametrize("issue_number", (666, "666", "#666")) def test_issue_url(default_gl_client: Gitlab, issue_number: int | str): expected_url = "{server}/{owner}/{repo}/-/issues/{issue_num}".format( server=default_gl_client.hvcs_domain.url, owner=default_gl_client.owner, repo=default_gl_client.repo_name, issue_num=str(issue_number).lstrip("#"), ) actual_url = default_gl_client.issue_url(issue_num=issue_number) assert expected_url == actual_url @pytest.mark.parametrize("pr_number", (666, "666", "!666")) def test_pull_request_url(default_gl_client: Gitlab, pr_number: int | str): expected_url = "{server}/{owner}/{repo}/-/merge_requests/{pr_number}".format( server=default_gl_client.hvcs_domain.url, owner=default_gl_client.owner, repo=default_gl_client.repo_name, pr_number=str(pr_number).lstrip("!"), ) actual_url = default_gl_client.pull_request_url(pr_number=pr_number) assert expected_url == actual_url @pytest.mark.parametrize("tag", (A_GOOD_TAG, A_LOCKED_TAG)) def test_create_release_succeeds( default_gl_client: Gitlab, default_gl_project: gitlab.v4.objects.Project, tag: str ): with mock.patch.object( default_gl_project.releases, default_gl_project.releases.create.__name__, ) as mocked_create_release: result = default_gl_client.create_release(tag, RELEASE_NOTES) assert tag == result mocked_create_release.assert_called_once_with( { "name": tag, "tag_name": tag, "tag_message": tag, 
"description": RELEASE_NOTES, } ) def test_create_release_fails_with_bad_tag( default_gl_client: Gitlab, default_gl_project: gitlab.v4.objects.Project, ): bad_request = gitlab.GitlabCreateError("401 Unauthorized") mock_failed_create = mock.patch.object( default_gl_project.releases, default_gl_project.releases.create.__name__, side_effect=bad_request, ) with mock_failed_create, pytest.raises(gitlab.GitlabCreateError): default_gl_client.create_release(A_BAD_TAG, RELEASE_NOTES) @pytest.mark.parametrize("tag", (A_GOOD_TAG, A_LOCKED_TAG)) def test_update_release_succeeds(default_gl_client: Gitlab, tag: str): fake_release_obj = gitlab.v4.objects.ProjectReleaseManager( default_gl_client._client ).get(tag, lazy=True) fake_release_obj._attrs["name"] = tag with mock.patch.object( gitlab.mixins.SaveMixin, gitlab.mixins.SaveMixin.save.__name__, ) as mocked_update_release: release_id = default_gl_client.edit_release_notes( fake_release_obj, RELEASE_NOTES ) assert tag == release_id mocked_update_release.assert_called_once() assert RELEASE_NOTES == fake_release_obj.description # noqa: SIM300 def test_update_release_fails_with_missing_tag( default_gl_client: Gitlab, default_gl_project: gitlab.v4.objects.Project, ): fake_release_obj = gitlab.v4.objects.ProjectRelease( default_gl_project.manager, {"id": A_MISSING_TAG, "name": A_MISSING_TAG}, lazy=True, ) mocked_update_release = mock.patch.object( gitlab.mixins.SaveMixin, gitlab.mixins.SaveMixin.save.__name__, side_effect=gitlab.GitlabUpdateError, ) with mocked_update_release, pytest.raises(gitlab.GitlabUpdateError): default_gl_client.edit_release_notes(fake_release_obj, RELEASE_NOTES) @pytest.mark.parametrize("prerelease", (True, False)) def test_create_or_update_release_when_create_succeeds( default_gl_client: Gitlab, prerelease: bool ): with mock.patch.object( default_gl_client, default_gl_client.create_release.__name__, return_value=A_GOOD_TAG, ) as mock_create_release, mock.patch.object( default_gl_client, 
default_gl_client.edit_release_notes.__name__, return_value=A_GOOD_TAG, ) as mock_edit_release_notes: # Execute in mock environment result = default_gl_client.create_or_update_release( A_GOOD_TAG, RELEASE_NOTES, prerelease ) # Evaluate (expected -> actual) assert A_GOOD_TAG == result # noqa: SIM300 mock_create_release.assert_called_once_with( tag=A_GOOD_TAG, release_notes=RELEASE_NOTES, prerelease=prerelease ) mock_edit_release_notes.assert_not_called() def test_get_release_id_by_tag( default_gl_client: Gitlab, default_gl_project: gitlab.v4.objects.Project, ): dummy_release = default_gl_project.releases.get(A_GOOD_TAG, lazy=True) with mock.patch.object( default_gl_project.releases, default_gl_project.releases.get.__name__, return_value=dummy_release, ) as mocked_get_release_id: result = default_gl_client.get_release_by_tag(A_GOOD_TAG) assert dummy_release == result mocked_get_release_id.assert_called_once_with(A_GOOD_TAG) def test_get_release_id_by_tag_fails( default_gl_client: Gitlab, default_gl_project: gitlab.v4.objects.Project, ): mocked_get_release_id = mock.patch.object( default_gl_project.releases, default_gl_project.releases.get.__name__, side_effect=gitlab.exceptions.GitlabAuthenticationError, ) with pytest.raises( gitlab.exceptions.GitlabAuthenticationError ), mocked_get_release_id: default_gl_client.get_release_by_tag(A_GOOD_TAG) def test_get_release_id_by_tag_not_found( default_gl_client: Gitlab, default_gl_project: gitlab.v4.objects.Project, ): mocked_get_release_id = mock.patch.object( default_gl_project.releases, default_gl_project.releases.get.__name__, side_effect=gitlab.exceptions.GitlabGetError, ) with mocked_get_release_id: result = default_gl_client.get_release_by_tag(A_GOOD_TAG) assert result is None @pytest.mark.parametrize("prerelease", (True, False)) def test_create_or_update_release_when_create_fails_and_update_succeeds( default_gl_client: Gitlab, prerelease: bool, ): bad_request = gitlab.GitlabCreateError("400 Bad Request") 
expected_release_obj = gitlab.v4.objects.ProjectRelease( gitlab.v4.objects.ProjectManager(default_gl_client._client), {"commit": {"id": "1"}, "name": A_GOOD_TAG}, lazy=True, ) with mock.patch.object( default_gl_client, default_gl_client.create_release.__name__, side_effect=bad_request, ), mock.patch.object( default_gl_client, default_gl_client.get_release_by_tag.__name__, return_value=expected_release_obj, ), mock.patch.object( default_gl_client, default_gl_client.edit_release_notes.__name__, return_value=A_GOOD_TAG, ) as mock_edit_release_notes: # Execute in mock environment default_gl_client.create_or_update_release( A_GOOD_TAG, RELEASE_NOTES, prerelease ) # Evaluate (expected -> actual) mock_edit_release_notes.assert_called_once_with( release=expected_release_obj, release_notes=RELEASE_NOTES ) @pytest.mark.parametrize("prerelease", (True, False)) def test_create_or_update_release_when_create_fails_and_update_fails( default_gl_client: Gitlab, prerelease: bool, ): bad_request = gitlab.GitlabCreateError("400 Bad Request") not_found = gitlab.GitlabUpdateError("404 Not Found") fake_release_obj = gitlab.v4.objects.ProjectRelease( gitlab.v4.objects.ProjectManager(default_gl_client._client), {"commit": {"id": "1"}, "name": A_GOOD_TAG}, lazy=True, ) create_release_patch = mock.patch.object( default_gl_client, default_gl_client.create_release.__name__, side_effect=bad_request, ) edit_release_notes_patch = mock.patch.object( default_gl_client, default_gl_client.edit_release_notes.__name__, side_effect=not_found, ) get_release_by_id_patch = mock.patch.object( default_gl_client, default_gl_client.get_release_by_tag.__name__, return_value=fake_release_obj, ) # Execute in mocked environment expecting a GitlabUpdateError to be raised with create_release_patch, edit_release_notes_patch, get_release_by_id_patch: # noqa: SIM117 with pytest.raises(gitlab.GitlabUpdateError): default_gl_client.create_or_update_release( A_GOOD_TAG, RELEASE_NOTES, prerelease ) 
python-semantic-release-10.4.1/tests/unit/semantic_release/hvcs/test_token_auth.py000066400000000000000000000017061506116242600305050ustar00rootroot00000000000000import pytest from requests import Request from semantic_release.hvcs.token_auth import TokenAuth @pytest.fixture def example_request(): return Request( "GET", url="http://example.com", headers={ "User-Agent": "Python3", "Content-Type": "application/json", "Accept": "application/json", }, ) def test_token_eq(): t1 = TokenAuth("foo") t2 = TokenAuth("foo") assert t1 == t2 def test_token_neq(): t1 = TokenAuth("foo") t2 = TokenAuth("bar") assert t1 != t2 def test_call_token_auth_sets_headers(example_request): old_headers = example_request.headers.copy() old_headers.pop("Authorization", None) t1 = TokenAuth("foo") new_req = t1(example_request) auth_header = new_req.headers.pop("Authorization") assert auth_header == "token foo" assert new_req.headers == old_headers assert new_req.__dict__ == example_request.__dict__ python-semantic-release-10.4.1/tests/unit/semantic_release/hvcs/test_util.py000066400000000000000000000000001506116242600273030ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/unit/semantic_release/test_helpers.py000066400000000000000000000233441506116242600270450ustar00rootroot00000000000000from typing import Iterable import pytest from semantic_release.helpers import ParsedGitUrl, parse_git_url, sort_numerically @pytest.mark.parametrize( "url, expected", [ ( "http://git.mycompany.com/username/myproject.git", ParsedGitUrl("http", "git.mycompany.com", "username", "myproject"), ), ( "http://subsubdomain.subdomain.company-net.com/username/myproject.git", ParsedGitUrl( "http", "subsubdomain.subdomain.company-net.com", "username", "myproject", ), ), ( "https://github.com/username/myproject.git", ParsedGitUrl("https", "github.com", "username", "myproject"), ), ( "https://gitlab.com/group/subgroup/myproject.git", ParsedGitUrl("https", "gitlab.com", "group/subgroup", "myproject"), ), ( 
"https://git.mycompany.com:4443/username/myproject.git", ParsedGitUrl("https", "git.mycompany.com:4443", "username", "myproject"), ), ( "https://subsubdomain.subdomain.company-net.com/username/myproject.git", ParsedGitUrl( "https", "subsubdomain.subdomain.company-net.com", "username", "myproject", ), ), ( "git://host.xz/path/to/repo.git/", ParsedGitUrl("git", "host.xz", "path/to", "repo"), ), ( "git://host.xz:9418/path/to/repo.git/", ParsedGitUrl("git", "host.xz:9418", "path/to", "repo"), ), ( "git@github.com:username/myproject.git", ParsedGitUrl("ssh", "git@github.com", "username", "myproject"), ), ( "git@subsubdomain.subdomain.company-net.com:username/myproject.git", ParsedGitUrl( "ssh", "git@subsubdomain.subdomain.company-net.com", "username", "myproject", ), ), ( "first.last_test-1@subsubdomain.subdomain.company-net.com:username/myproject.git", ParsedGitUrl( "ssh", "first.last_test-1@subsubdomain.subdomain.company-net.com", "username", "myproject", ), ), ( "ssh://git@github.com:3759/myproject.git", ParsedGitUrl("ssh", "git@github.com", "3759", "myproject"), ), ( "ssh://git@github.com:username/myproject.git", ParsedGitUrl("ssh", "git@github.com", "username", "myproject"), ), ( "ssh://git@bitbucket.org:7999/username/myproject.git", ParsedGitUrl("ssh", "git@bitbucket.org:7999", "username", "myproject"), ), ( "ssh://git@subsubdomain.subdomain.company-net.com:username/myproject.git", ParsedGitUrl( "ssh", "git@subsubdomain.subdomain.company-net.com", "username", "myproject", ), ), ( "git+ssh://git@github.com:username/myproject.git", ParsedGitUrl("ssh", "git@github.com", "username", "myproject"), ), ( "/Users/username/dev/remote/myproject.git", ParsedGitUrl("file", "", "Users/username/dev/remote", "myproject"), ), ( "file:///Users/username/dev/remote/myproject.git", ParsedGitUrl("file", "", "Users/username/dev/remote", "myproject"), ), ( "C:/Users/username/dev/remote/myproject.git", ParsedGitUrl("file", "", "C:/Users/username/dev/remote", "myproject"), ), ( 
"file:///C:/Users/username/dev/remote/myproject.git", ParsedGitUrl("file", "", "C:/Users/username/dev/remote", "myproject"), ), ], ) def test_parse_valid_git_urls(url: str, expected: ParsedGitUrl): """Test that a valid given git remote url is parsed correctly.""" assert expected == parse_git_url(url) @pytest.mark.parametrize( "url", [ "icmp://git", "abcdefghijklmnop.git", "../relative/path/to/repo.git", "http://domain/project.git", ], ) def test_parse_invalid_git_urls(url: str): """Test that an invalid git remote url throws a ValueError.""" with pytest.raises(ValueError): parse_git_url(url) @pytest.mark.parametrize( "unsorted_list, sorted_list, reverse, allow_hex", [ pytest.param( unsorted_list, sorted_list, reverse, allow_hex, id=f"({i}) {test_id}", ) for i, (test_id, unsorted_list, sorted_list, reverse, allow_hex) in enumerate( [ ( "Only numbers (with mixed digits, ASC)", ["5", "3", "10"], ["3", "5", "10"], False, False, ), ( "Only numbers (with mixed digits, DESC)", ["5", "3", "10"], ["10", "5", "3"], True, False, ), ( "Only PR numbers (ASC)", ["#5", "#3", "#10"], ["#3", "#5", "#10"], False, False, ), ( "Only PR numbers (DESC)", ["#5", "#3", "#10"], ["#10", "#5", "#3"], True, False, ), ( "Multiple prefixes (ASC)", ["#5", "PR#3", "PR#10", "#100"], ["#5", "#100", "PR#3", "PR#10"], False, False, ), ( "Multiple prefixes (DESC)", ["#5", "PR#3", "PR#10", "#100"], ["#100", "#5", "PR#10", "PR#3"], True, False, ), ( "No numbers mixed with mulitple prefixes (ASC)", ["word", "#100", "#1000", "PR#45"], ["#100", "#1000", "PR#45", "word"], False, False, ), ( "No numbers mixed with mulitple prefixes (DESC)", ["word", "#100", "#1000", "PR#45"], ["#1000", "#100", "PR#45", "word"], True, False, ), ( "Commit hash links in RST link format (ASC)", [".. _8ab43ed:", ".. _7ffed34:", ".. _a3b4c54:"], [".. _7ffed34:", ".. _8ab43ed:", ".. _a3b4c54:"], False, True, ), ( "Commit hash links in RST link format (DESC)", [".. _8ab43ed:", ".. _7ffed34:", ".. _a3b4c54:"], [".. _a3b4c54:", ".. 
_8ab43ed:", ".. _7ffed34:"], True, True, ), ( "Mixed numbers, PR numbers, and commit hash links in RST link format (ASC)", [ ".. _#5:", ".. _8ab43ed:", ".. _PR#3:", ".. _#20:", ".. _7ffed34:", ".. _#100:", ".. _a3b4c54:", ], [ ".. _7ffed34:", ".. _8ab43ed:", ".. _a3b4c54:", ".. _#5:", ".. _#20:", ".. _#100:", ".. _PR#3:", ], False, True, ), ( "Mixed numbers, PR numbers, and commit hash links in RST link format (DESC)", [ ".. _#5:", ".. _8ab43ed:", ".. _PR#3:", ".. _#20:", ".. _7ffed34:", ".. _#100:", ".. _a3b4c54:", ], [ ".. _a3b4c54:", ".. _8ab43ed:", ".. _7ffed34:", ".. _#100:", ".. _#20:", ".. _#5:", ".. _PR#3:", ], True, True, ), ( # No change since the prefixes are always alphabetical, asc/desc only is b/w numbers "Same numbers with different prefixes (ASC)", ["PR#5", "#5"], ["#5", "PR#5"], False, False, ), ( "Same numbers with different prefixes (DESC)", ["#5", "PR#5"], ["#5", "PR#5"], True, False, ), ], start=1, ) ], ) def test_sort_numerically( unsorted_list: Iterable[str], sorted_list: Iterable[str], reverse: bool, allow_hex: bool, ): actual_list = sort_numerically( iterable=unsorted_list, reverse=reverse, allow_hex=allow_hex, ) assert sorted_list == actual_list 
python-semantic-release-10.4.1/tests/unit/semantic_release/version/000077500000000000000000000000001506116242600254515ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/unit/semantic_release/version/__init__.py000066400000000000000000000000001506116242600275500ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/unit/semantic_release/version/declarations/000077500000000000000000000000001506116242600301215ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/unit/semantic_release/version/declarations/__init__.py000066400000000000000000000000001506116242600322200ustar00rootroot00000000000000test_pattern_declaration.py000066400000000000000000000405771506116242600355120ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/unit/semantic_release/version/declarationsfrom __future__ import annotations from pathlib import Path from re import compile as regexp from textwrap import dedent from typing import TYPE_CHECKING import pytest from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.version.declarations.enum import VersionStampType from semantic_release.version.declarations.i_version_replacer import IVersionReplacer from semantic_release.version.declarations.pattern import PatternVersionDeclaration from semantic_release.version.version import Version from tests.fixtures.git_repo import default_tag_format_str if TYPE_CHECKING: from re import Pattern def test_pattern_declaration_is_version_replacer(): """ Given the class PatternVersionDeclaration or an instance of it, When the class is evaluated as a subclass or an instance of, Then the evaluation is true """ assert issubclass(PatternVersionDeclaration, IVersionReplacer) pattern_instance = PatternVersionDeclaration( "file", r"^version = (?P.*)", VersionStampType.NUMBER_FORMAT ) assert isinstance(pattern_instance, IVersionReplacer) @pytest.mark.parametrize( str.join( ", ", [ "replacement_def", "tag_format", "starting_contents", 
"resulting_contents", "next_version", "test_file", ], ), [ pytest.param( replacement_def, tag_format, starting_contents, resulting_contents, next_version, test_file, id=test_id, ) for test_file in ["test_file"] for next_version in ["1.2.3"] for test_id, replacement_def, tag_format, starting_contents, resulting_contents in [ ( "Default number format for python string variable", f"{test_file}:__version__", # irrelevant for this case lazy_fixture(default_tag_format_str.__name__), # Uses equals separator with single quotes """__version__ = '1.0.0'""", f"""__version__ = '{next_version}'""", ), ( "Explicit number format for python string variable", f"{test_file}:__version__:{VersionStampType.NUMBER_FORMAT.value}", # irrelevant for this case lazy_fixture(default_tag_format_str.__name__), # Uses equals separator with single quotes """__version__ = '1.0.0'""", f"""__version__ = '{next_version}'""", ), ( "Using default tag format for python string variable", f"{test_file}:__version__:{VersionStampType.TAG_FORMAT.value}", lazy_fixture(default_tag_format_str.__name__), # Uses equals separator with single quotes """__version__ = 'v1.0.0'""", f"""__version__ = 'v{next_version}'""", ), ( "Using custom tag format for python string variable", f"{test_file}:__version__:{VersionStampType.TAG_FORMAT.value}", "module-v{version}", # Uses equals separator with double quotes '''__version__ = "module-v1.0.0"''', f'''__version__ = "module-v{next_version}"''', ), ( # Based on https://github.com/python-semantic-release/python-semantic-release/issues/1156 "Using default tag format for github actions uses-directive", f"{test_file}:repo/action-name:{VersionStampType.TAG_FORMAT.value}", lazy_fixture(default_tag_format_str.__name__), # Uses @ symbol separator without quotes or spaces """ uses: repo/action-name@v1.0.0""", f""" uses: repo/action-name@v{next_version}""", ), ( # Based on https://github.com/python-semantic-release/python-semantic-release/issues/1156 "Using custom tag format for github 
actions uses-directive", f"{test_file}:repo/action-name:{VersionStampType.TAG_FORMAT.value}", "module-v{version}", # Uses @ symbol separator without quotes or spaces """ uses: repo/action-name@module-v1.0.0""", f""" uses: repo/action-name@module-v{next_version}""", ), ( # Based on https://github.com/python-semantic-release/python-semantic-release/issues/846 "Using default tag format for multi-line yaml", f"{test_file}:newTag:{VersionStampType.TAG_FORMAT.value}", lazy_fixture(default_tag_format_str.__name__), # Uses colon separator without quotes dedent( """\ # kustomization.yaml images: - name: repo/image newTag: v1.0.0 """ ), dedent( f"""\ # kustomization.yaml images: - name: repo/image newTag: v{next_version} """ ), ), ( # Based on https://github.com/python-semantic-release/python-semantic-release/issues/846 "Using custom tag format for multi-line yaml", f"{test_file}:newTag:{VersionStampType.TAG_FORMAT.value}", "module-v{version}", # Uses colon separator without quotes dedent( """\ # kustomization.yaml images: - name: repo/image newTag: module-v1.0.0 """ ), dedent( f"""\ # kustomization.yaml images: - name: repo/image newTag: module-v{next_version} """ ), ), ( "Explicit number format for python walrus string variable", f"{test_file}:version:{VersionStampType.NUMBER_FORMAT.value}", # irrelevant for this case lazy_fixture(default_tag_format_str.__name__), # Uses walrus separator with single quotes """if version := '1.0.0': """, f"""if version := '{next_version}': """, ), ( "Explicit number format for requirements.txt file with double equals", f"{test_file}:my-package:{VersionStampType.NUMBER_FORMAT.value}", # irrelevant for this case lazy_fixture(default_tag_format_str.__name__), # Uses double equals separator """my-package == 1.0.0""", f"""my-package == {next_version}""", ), ( "Using default number format for multi-line & quoted json", f"{test_file}:version:{VersionStampType.NUMBER_FORMAT.value}", # irrelevant for this case 
lazy_fixture(default_tag_format_str.__name__), # Uses colon separator with double quotes dedent( """\ { "version": "1.0.0" } """ ), dedent( f"""\ {{ "version": "{next_version}" }} """ ), ), ( "Using default tag format for multi-line & quoted json", f"{test_file}:version:{VersionStampType.TAG_FORMAT.value}", lazy_fixture(default_tag_format_str.__name__), # Uses colon separator with double quotes dedent( """\ { "version": "v1.0.0" } """ ), dedent( f"""\ {{ "version": "v{next_version}" }} """ ), ), ] ], ) def test_pattern_declaration_from_definition( replacement_def: str, tag_format: str, starting_contents: str, resulting_contents: str, next_version: str, test_file: str, change_to_ex_proj_dir: None, ): """ Given a file with a formatted version string, When update_file_w_version() is called with a new version, Then the file is updated with the new version string in the specified tag or number format Version variables can be separated by either "=", ":", "@", or ':=' with optional whitespace between operator and variable name. The variable name or values can also be wrapped in either single or double quotes. 
""" # Setup: create file with initial contents expected_filepath = Path(test_file).resolve() expected_filepath.write_text(starting_contents) # Create Pattern Replacer version_replacer = PatternVersionDeclaration.from_string_definition( replacement_def, tag_format, ) # Act: apply version change actual_file_modified = version_replacer.update_file_w_version( new_version=Version.parse(next_version, tag_format=tag_format), noop=False, ) # Evaluate actual_contents = Path(test_file).read_text() assert resulting_contents == actual_contents assert expected_filepath == actual_file_modified def test_pattern_declaration_no_file_change( default_tag_format_str: str, change_to_ex_proj_dir: None, ): """ Given a configured stamp file is already up-to-date, When update_file_w_version() is called with the same version, Then the file is not modified and no path is returned """ test_file = "test_file" expected_filepath = Path(test_file).resolve() next_version = Version.parse("1.2.3", tag_format=default_tag_format_str) starting_contents = f"""__version__ = '{next_version}'\n""" # Setup: create file with initial contents expected_filepath.write_text(starting_contents) # Create Pattern Replacer version_replacer = PatternVersionDeclaration.from_string_definition( f"{test_file}:__version__:{VersionStampType.NUMBER_FORMAT.value}", tag_format=default_tag_format_str, ) # Act: apply version change file_modified = version_replacer.update_file_w_version( new_version=next_version, noop=False, ) # Evaluate actual_contents = expected_filepath.read_text() assert starting_contents == actual_contents assert file_modified is None def test_pattern_declaration_error_on_missing_file( default_tag_format_str: str, ): # Initialization should not fail or do anything intensive version_replacer = PatternVersionDeclaration.from_string_definition( "nonexistent_file:__version__", tag_format=default_tag_format_str, ) with pytest.raises(FileNotFoundError): version_replacer.update_file_w_version( 
new_version=Version.parse("1.2.3", tag_format=default_tag_format_str), noop=False, ) def test_pattern_declaration_no_version_in_file( default_tag_format_str: str, change_to_ex_proj_dir: None, ): test_file = "test_file" expected_filepath = Path(test_file).resolve() starting_contents = """other content\n""" # Setup: create file with initial contents expected_filepath.write_text(starting_contents) # Create Pattern Replacer version_replacer = PatternVersionDeclaration.from_string_definition( f"{test_file}:__version__:{VersionStampType.NUMBER_FORMAT.value}", tag_format=default_tag_format_str, ) file_modified = version_replacer.update_file_w_version( new_version=Version.parse("1.2.3", tag_format=default_tag_format_str), noop=False, ) # Evaluate actual_contents = expected_filepath.read_text() assert file_modified is None assert starting_contents == actual_contents def test_pattern_declaration_noop_is_noop( default_tag_format_str: str, change_to_ex_proj_dir: None, ): test_file = "test_file" expected_filepath = Path(test_file).resolve() starting_contents = """__version__ = '1.0.0'\n""" # Setup: create file with initial contents expected_filepath.write_text(starting_contents) # Create Pattern Replacer version_replacer = PatternVersionDeclaration.from_string_definition( f"{test_file}:__version__:{VersionStampType.NUMBER_FORMAT.value}", tag_format=default_tag_format_str, ) # Act: apply version change file_modified = version_replacer.update_file_w_version( new_version=Version.parse("1.2.3", tag_format=default_tag_format_str), noop=True, ) # Evaluate actual_contents = Path(test_file).read_text() assert starting_contents == actual_contents assert expected_filepath == file_modified def test_pattern_declaration_noop_warning_on_missing_file( default_tag_format_str: str, capsys: pytest.CaptureFixture[str], ): version_replacer = PatternVersionDeclaration.from_string_definition( "nonexistent_file:__version__", tag_format=default_tag_format_str, ) file_to_modify = 
version_replacer.update_file_w_version( new_version=Version.parse("1.2.3", tag_format=default_tag_format_str), noop=True, ) # Evaluate assert file_to_modify is None assert ( "FILE NOT FOUND: cannot stamp version in non-existent file" in capsys.readouterr().err ) def test_pattern_declaration_noop_warning_on_no_version_in_file( default_tag_format_str: str, capsys: pytest.CaptureFixture[str], change_to_ex_proj_dir: None, ): test_file = "test_file" starting_contents = """other content\n""" # Setup: create file with initial contents Path(test_file).write_text(starting_contents) # Create Pattern Replacer version_replacer = PatternVersionDeclaration.from_string_definition( f"{test_file}:__version__:{VersionStampType.NUMBER_FORMAT.value}", tag_format=default_tag_format_str, ) file_to_modify = version_replacer.update_file_w_version( new_version=Version.parse("1.2.3", tag_format=default_tag_format_str), noop=True, ) # Evaluate assert file_to_modify is None assert ( "VERSION PATTERN NOT FOUND: no version to stamp in file" in capsys.readouterr().err ) @pytest.mark.parametrize( "search_text, error_msg", [ ( search_text, error_msg, ) for error_msg, search_text in [ *[ ("must use 'version' as a named group", s_text) for s_text in [ r"^version = (.*)$", r"^version = (?P.*)", r"(?P.*)", ] ], ("Invalid regular expression", r"*"), ] ], ) def test_bad_version_regex_fails(search_text: str, error_msg: Pattern[str] | str): with pytest.raises(ValueError, match=error_msg): PatternVersionDeclaration( "doesn't matter", search_text, VersionStampType.NUMBER_FORMAT ) @pytest.mark.parametrize( "replacement_def, error_msg", [ pytest.param( replacement_def, error_msg, id=str(error_msg), ) for replacement_def, error_msg in [ ( f"{Path(__file__)!s}", regexp(r"Invalid replacement definition .*, missing ':'"), ), ( f"{Path(__file__)!s}:__version__:not_a_valid_version_type", "Invalid stamp type, must be one of:", ), ] ], ) def test_pattern_declaration_w_invalid_definition( default_tag_format_str: str, 
replacement_def: str, error_msg: Pattern[str] | str, ): """ check if PatternVersionDeclaration raises ValueError when loaded from invalid strings given in the config file """ with pytest.raises(ValueError, match=error_msg): PatternVersionDeclaration.from_string_definition( replacement_def, default_tag_format_str, ) test_toml_declaration.py000066400000000000000000000245701506116242600350030ustar00rootroot00000000000000python-semantic-release-10.4.1/tests/unit/semantic_release/version/declarationsfrom __future__ import annotations from pathlib import Path from re import compile as regexp from textwrap import dedent from typing import TYPE_CHECKING import pytest from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.version.declarations.enum import VersionStampType from semantic_release.version.declarations.i_version_replacer import IVersionReplacer from semantic_release.version.declarations.toml import TomlVersionDeclaration from semantic_release.version.version import Version from tests.fixtures.git_repo import default_tag_format_str if TYPE_CHECKING: from re import Pattern def test_toml_declaration_is_version_replacer(): """ Given the class TomlVersionDeclaration or an instance of it, When the class is evaluated as a subclass or an instance of, Then the evaluation is true """ assert issubclass(TomlVersionDeclaration, IVersionReplacer) toml_instance = TomlVersionDeclaration( "file", "project.version", VersionStampType.NUMBER_FORMAT ) assert isinstance(toml_instance, IVersionReplacer) @pytest.mark.parametrize( str.join( ", ", [ "replacement_def", "tag_format", "starting_contents", "resulting_contents", "next_version", "test_file", ], ), [ pytest.param( replacement_def, tag_format, starting_contents, resulting_contents, next_version, test_file, id=test_id, ) for test_file in ["test_file.toml"] for next_version in ["1.2.3"] for test_id, replacement_def, tag_format, starting_contents, resulting_contents in [ ( "Default number format for 
project.version", f"{test_file}:project.version", # irrelevant for this case lazy_fixture(default_tag_format_str.__name__), # Uses equals separator with single quotes dedent( """\ [project] version = '1.0.0' """ ), dedent( f"""\ [project] version = "{next_version}" """ ), ), ( "Explicit number format for project.version", f"{test_file}:project.version:{VersionStampType.NUMBER_FORMAT.value}", # irrelevant for this case lazy_fixture(default_tag_format_str.__name__), # Uses equals separator with double quotes dedent( """\ [project] version = "1.0.0" """ ), dedent( f"""\ [project] version = "{next_version}" """ ), ), ( "Using default tag format for toml string variable", f"{test_file}:version:{VersionStampType.TAG_FORMAT.value}", lazy_fixture(default_tag_format_str.__name__), # Uses equals separator with single quotes '''version = "v1.0.0"''', f'''version = "v{next_version}"''', ), ( "Using custom tag format for toml string variable", f"{test_file}:version:{VersionStampType.TAG_FORMAT.value}", "module-v{version}", # Uses equals separator with double quotes '''version = "module-v1.0.0"''', f'''version = "module-v{next_version}"''', ), ] ], ) def test_toml_declaration_from_definition( replacement_def: str, tag_format: str, starting_contents: str, resulting_contents: str, next_version: str, test_file: str, change_to_ex_proj_dir: None, ): """ Given a file with a formatted version string, When update_file_w_version() is called with a new version, Then the file is updated with the new version string in the specified tag or number format Version variables can be separated by either "=", ":", "@", or ':=' with optional whitespace between operator and variable name. The variable name or values can also be wrapped in either single or double quotes. 
""" # Setup: create file with initial contents expected_filepath = Path(test_file).resolve() expected_filepath.write_text(starting_contents) # Create Pattern Replacer version_replacer = TomlVersionDeclaration.from_string_definition(replacement_def) # Act: apply version change actual_file_modified = version_replacer.update_file_w_version( new_version=Version.parse(next_version, tag_format=tag_format), noop=False, ) # Evaluate actual_contents = Path(test_file).read_text() assert resulting_contents == actual_contents assert expected_filepath == actual_file_modified def test_toml_declaration_no_file_change( change_to_ex_proj_dir: None, ): """ Given a configured stamp file is already up-to-date, When update_file_w_version() is called with the same version, Then the file is not modified and no path is returned """ test_file = "test_file" next_version = Version.parse("1.2.3") starting_contents = dedent( f"""\ [project] version = "{next_version}" """ ) # Setup: create file with initial contents Path(test_file).write_text(starting_contents) # Create Pattern Replacer version_replacer = TomlVersionDeclaration.from_string_definition( f"{test_file}:project.version:{VersionStampType.NUMBER_FORMAT.value}", ) # Act: apply version change file_modified = version_replacer.update_file_w_version( new_version=next_version, noop=False, ) # Evaluate actual_contents = Path(test_file).read_text() assert starting_contents == actual_contents assert file_modified is None def test_toml_declaration_error_on_missing_file(): # Initialization should not fail or do anything intensive version_replacer = TomlVersionDeclaration.from_string_definition( "nonexistent_file:version", ) with pytest.raises(FileNotFoundError): version_replacer.update_file_w_version( new_version=Version.parse("1.2.3"), noop=False, ) def test_toml_declaration_no_version_in_file( change_to_ex_proj_dir: None, ): test_file = "test_file" expected_filepath = Path(test_file).resolve() starting_contents = dedent( """\ [project] name = 
"example" """ ) # Setup: create file with initial contents expected_filepath.write_text(starting_contents) # Create Pattern Replacer version_replacer = TomlVersionDeclaration.from_string_definition( f"{test_file}:project.version:{VersionStampType.NUMBER_FORMAT.value}", ) file_modified = version_replacer.update_file_w_version( new_version=Version.parse("1.2.3"), noop=False, ) # Evaluate actual_contents = expected_filepath.read_text() assert file_modified is None assert starting_contents == actual_contents def test_toml_declaration_noop_is_noop( change_to_ex_proj_dir: None, ): test_file = "test_file" expected_filepath = Path(test_file).resolve() starting_contents = dedent( """\ [project] version = '1.0.0' """ ) # Setup: create file with initial contents expected_filepath.write_text(starting_contents) # Create Pattern Replacer version_replacer = TomlVersionDeclaration.from_string_definition( f"{test_file}:project.version:{VersionStampType.NUMBER_FORMAT.value}", ) # Act: apply version change file_modified = version_replacer.update_file_w_version( new_version=Version.parse("1.2.3"), noop=True, ) # Evaluate actual_contents = Path(test_file).read_text() assert starting_contents == actual_contents assert expected_filepath == file_modified def test_toml_declaration_noop_warning_on_missing_file( capsys: pytest.CaptureFixture[str], ): version_replacer = TomlVersionDeclaration.from_string_definition( "nonexistent_file:version", ) file_to_modify = version_replacer.update_file_w_version( new_version=Version.parse("1.2.3"), noop=True, ) # Evaluate assert file_to_modify is None assert ( "FILE NOT FOUND: cannot stamp version in non-existent file" in capsys.readouterr().err ) def test_toml_declaration_noop_warning_on_no_version_in_file( capsys: pytest.CaptureFixture[str], change_to_ex_proj_dir: None, ): test_file = "test_file" starting_contents = dedent( """\ [project] name = "example" """ ) # Setup: create file with initial contents Path(test_file).write_text(starting_contents) # 
Create Pattern Replacer version_replacer = TomlVersionDeclaration.from_string_definition( f"{test_file}:project.version:{VersionStampType.NUMBER_FORMAT.value}", ) file_to_modify = version_replacer.update_file_w_version( new_version=Version.parse("1.2.3"), noop=True, ) # Evaluate assert file_to_modify is None assert ( "VERSION PATTERN NOT FOUND: no version to stamp in file" in capsys.readouterr().err ) @pytest.mark.parametrize( "replacement_def, error_msg", [ pytest.param( replacement_def, error_msg, id=str(error_msg), ) for replacement_def, error_msg in [ ( f"{Path(__file__)!s}", regexp(r"Invalid TOML replacement definition .*, missing ':'"), ), ( f"{Path(__file__)!s}:tool.poetry.version:not_a_valid_version_type", "Invalid stamp type, must be one of:", ), ] ], ) def test_toml_declaration_w_invalid_definition( replacement_def: str, error_msg: Pattern[str] | str, ): """ check if TomlVersionDeclaration raises ValueError when loaded from invalid strings given in the config file """ with pytest.raises(ValueError, match=error_msg): TomlVersionDeclaration.from_string_definition(replacement_def) python-semantic-release-10.4.1/tests/unit/semantic_release/version/test_algorithm.py000066400000000000000000000247361506116242600310640ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING from unittest import mock import pytest from git import Commit, Repo, TagReference from semantic_release.enums import LevelBump from semantic_release.version.algorithm import ( _increment_version, _traverse_graph_for_commits, tags_and_versions, ) from semantic_release.version.translator import VersionTranslator from semantic_release.version.version import Version from tests.fixtures.repos import repo_w_initial_commit if TYPE_CHECKING: from typing import Sequence @pytest.mark.usefixtures(repo_w_initial_commit.__name__) def test_traverse_graph_for_commits(): # Setup fake git graph """ * merge commit 6 (start) [3636363] |\ | * commit 5 [3535353] | * commit 
4 [3434343] |/ * commit 3 [3333333] * commit 2 [3232323] * commit 1 [3131313] * v1.0.0 [3030303] """ repo = Repo() v1_commit = Commit(repo, binsha=b"0" * 20, parents=[]) class TagReferenceOverride(TagReference): commit = v1_commit # mocking the commit property v1_tag = TagReferenceOverride(repo, "refs/tags/v1.0.0", check_path=False) trunk = Commit( repo, binsha=b"3" * 20, parents=[ Commit( repo, binsha=b"2" * 20, parents=[ Commit(repo, binsha=b"1" * 20, parents=[v1_commit]), ], ), ], ) start_commit = Commit( repo, binsha=b"6" * 20, parents=[ trunk, Commit( repo, binsha=b"5" * 20, parents=[ Commit(repo, binsha=b"4" * 20, parents=[trunk]), ], ), ], ) commit_1 = trunk.parents[0].parents[0] commit_2 = trunk.parents[0] commit_3 = trunk commit_4 = start_commit.parents[1].parents[0] commit_5 = start_commit.parents[1] commit_6 = start_commit expected_commit_order = [ commit_6.hexsha, commit_5.hexsha, commit_4.hexsha, commit_3.hexsha, commit_2.hexsha, commit_1.hexsha, ] # Execute with mock.patch.object( repo, repo.iter_commits.__name__, return_value=iter([v1_commit]) ): actual_commit_order = [ commit.hexsha for commit in _traverse_graph_for_commits( head_commit=start_commit, latest_release_tag_str=v1_tag.name, ) ] # Verify assert expected_commit_order == actual_commit_order @pytest.mark.parametrize( "tags, sorted_tags", [ ( ["v1.0.0", "v1.1.0", "v1.1.1"], ["v1.1.1", "v1.1.0", "v1.0.0"], ), ( ["v1.1.0", "v1.0.0", "v1.1.1"], ["v1.1.1", "v1.1.0", "v1.0.0"], ), ( ["v1.1.1", "v1.1.0", "v1.0.0"], ["v1.1.1", "v1.1.0", "v1.0.0"], ), # Examples from https://semver.org/#spec-item-11 (or inspired, where not all # version structures are supported) ( ["v1.0.0", "v2.0.0", "v2.1.1", "v2.1.0"], ["v2.1.1", "v2.1.0", "v2.0.0", "v1.0.0"], ), ( [ "v1.0.0-rc.1", "v1.0.0-beta.2", "v1.0.0-beta.11", "v1.0.0-alpha.1", "v1.0.0-alpha.beta.1", "v1.0.0", ], [ "v1.0.0", "v1.0.0-rc.1", "v1.0.0-beta.11", "v1.0.0-beta.2", "v1.0.0-alpha.beta.1", "v1.0.0-alpha.1", ], ), ], ) def 
test_sorted_repo_tags_and_versions(tags: list[str], sorted_tags: list[str]): repo = Repo() translator = VersionTranslator() tagrefs = [repo.tag(tag) for tag in tags] actual = [t.name for t, _ in tags_and_versions(tagrefs, translator)] assert sorted_tags == actual @pytest.mark.parametrize( "tag_format, invalid_tags, valid_tags", [ ( "v{version}", ("test-v1.1.0", "v1.1.0-test-test"), [ "v1.0.0-rc.1", "v1.0.0-beta.2", "v1.0.0-beta.11", "v1.0.0-alpha.1", "v1.0.0-alpha.beta.1", "v1.0.0", ], ), ( "v{version}", ("0.3", "0.4"), [ "v1.0.0-rc.1", "v1.0.0-beta.2", "v1.0.0-beta.11", "v1.0.0-alpha.1", "v1.0.0-alpha.beta.1", "v1.0.0", ], ), ( r"(\w+--)?v{version}", ("v1.1.0-test-test", "test_v1.1.0"), [ "v1.0.0-rc.1", "test--v1.1.0", "v1.0.0-beta.2", "v1.0.0-beta.11", "v1.0.0-alpha.1", "v1.0.0-alpha.beta.1", "v1.0.0", ], ), ( r"(?Pfeature|fix)/v{version}--(?Pdev|stg|prod)", ("v1.1.0--test", "test_v1.1.0", "docs/v1.2.0--dev"), [ "feature/v1.0.0-rc.1--dev", "fix/v1.1.0--stg", "feature/v1.0.0-beta.2--stg", "fix/v1.0.0-beta.11--dev", "fix/v1.0.0-alpha.1--dev", "feature/v1.0.0-alpha.beta.1--dev", "feature/v1.0.0--prod", ], ), ], ) def test_tags_and_versions_ignores_invalid_tags_as_versions( tag_format: str, invalid_tags: Sequence[str], valid_tags: Sequence[str], ): repo = Repo() translator = VersionTranslator(tag_format=tag_format) tagrefs = [repo.tag(tag) for tag in (*valid_tags, *invalid_tags)] actual = [t.name for t, _ in tags_and_versions(tagrefs, translator)] assert set(valid_tags) == set(actual) @pytest.mark.parametrize( str.join( ", ", [ "latest_version", "latest_full_version", "level_bump", "prerelease", "prerelease_token", "expected_version", ], ), [ # NOTE: level_bump != LevelBump.NO_RELEASE, we return early in the # algorithm to discount this case # NOTE: you can only perform a PRERELEASE_REVISION bump on a previously # prerelease version and if you are requesting a prerelease ( "1.0.1-rc.1", "1.0.0", LevelBump.PRERELEASE_REVISION, True, "rc", "1.0.1-rc.2", ), *[ ( 
"1.0.0", "1.0.0", bump_level, prerelease, "rc", expected_version, ) for bump_level, prerelease, expected_version in [ (LevelBump.PATCH, False, "1.0.1"), (LevelBump.PATCH, True, "1.0.1-rc.1"), (LevelBump.MINOR, False, "1.1.0"), (LevelBump.MINOR, True, "1.1.0-rc.1"), (LevelBump.MAJOR, False, "2.0.0"), (LevelBump.MAJOR, True, "2.0.0-rc.1"), ] ], ( "1.2.4-rc.1", "1.2.3", LevelBump.PRERELEASE_REVISION, True, "rc", "1.2.4-rc.2", ), *[ ( "1.2.4-rc.1", "1.2.3", bump_level, prerelease, "rc", expected_version, ) for bump_level, prerelease, expected_version in [ (LevelBump.PATCH, False, "1.2.4"), (LevelBump.PATCH, True, "1.2.4-rc.2"), (LevelBump.MINOR, False, "1.3.0"), (LevelBump.MINOR, True, "1.3.0-rc.1"), (LevelBump.MAJOR, False, "2.0.0"), (LevelBump.MAJOR, True, "2.0.0-rc.1"), ] ], ( "2.0.0-rc.1", "1.19.3", LevelBump.PRERELEASE_REVISION, True, "rc", "2.0.0-rc.2", ), *[ ( "2.0.0-rc.1", "1.22.0", bump_level, prerelease, "rc", expected_version, ) for bump_level, prerelease, expected_version in [ (LevelBump.PATCH, False, "2.0.0"), (LevelBump.PATCH, True, "2.0.0-rc.2"), (LevelBump.MINOR, False, "2.0.0"), (LevelBump.MINOR, True, "2.0.0-rc.2"), (LevelBump.MAJOR, False, "2.0.0"), (LevelBump.MAJOR, True, "2.0.0-rc.2"), ] ], ], ) def test_increment_version_no_major_on_zero( latest_version: str, latest_full_version: str, level_bump: LevelBump, prerelease: bool, prerelease_token: str, expected_version: str, ): actual = _increment_version( latest_version=Version.parse(latest_version), latest_full_version=Version.parse(latest_full_version), level_bump=level_bump, prerelease=prerelease, prerelease_token=prerelease_token, major_on_zero=False, allow_zero_version=True, ) assert expected_version == str(actual) @pytest.mark.parametrize( "latest_version, latest_full_version, level_bump, prerelease, prerelease_token", [ # NOTE: level_bump != LevelBump.NO_RELEASE, we return early in the # algorithm to discount this case # NOTE: you can only perform a PRERELEASE_REVISION bump on a previously # 
prerelease version and if you are requesting a prerelease ( "1.0.0", "1.0.0", LevelBump.PRERELEASE_REVISION, False, "rc", ), ( "1.0.0", "1.0.0", LevelBump.PRERELEASE_REVISION, True, "rc", ), ], ) def test_increment_version_invalid_operation( latest_version: str, latest_full_version: str, level_bump: LevelBump, prerelease: bool, prerelease_token: str, ): with pytest.raises(ValueError): _increment_version( latest_version=Version.parse(latest_version), latest_full_version=Version.parse(latest_full_version), level_bump=level_bump, prerelease=prerelease, prerelease_token=prerelease_token, major_on_zero=False, allow_zero_version=True, ) python-semantic-release-10.4.1/tests/unit/semantic_release/version/test_translator.py000066400000000000000000000056011506116242600312550ustar00rootroot00000000000000import pytest from semantic_release.const import SEMVER_REGEX from semantic_release.version.translator import VersionTranslator from semantic_release.version.version import Version from tests.const import ( A_FULL_VERSION_STRING, A_FULL_VERSION_STRING_WITH_BUILD_METADATA, A_PRERELEASE_VERSION_STRING, ) @pytest.fixture def a_full_version() -> Version: return Version.parse(A_FULL_VERSION_STRING) @pytest.fixture def a_prerelease_version() -> Version: return Version.parse(A_PRERELEASE_VERSION_STRING) @pytest.fixture def a_full_version_with_build_metadata() -> Version: return Version.parse(A_FULL_VERSION_STRING_WITH_BUILD_METADATA) @pytest.mark.parametrize( "version_string", [ A_FULL_VERSION_STRING, A_PRERELEASE_VERSION_STRING, A_FULL_VERSION_STRING_WITH_BUILD_METADATA, "3.2.3-alpha.dev3+local.12345", # Pretty much as complex an example as there is ], ) def test_succeeds_semver_regex_match(version_string: str): assert SEMVER_REGEX.fullmatch( version_string ), "a valid semantic version was not matched" @pytest.mark.parametrize( "invalid_version_str", ["v1.2.3", "2.1", "3.1.1..3", "4.1.1.dev3"], # PEP440 version ) def test_invalid_semver_not_matched(invalid_version_str: str): assert 
SEMVER_REGEX.fullmatch(invalid_version_str) is None @pytest.mark.parametrize("fmt", ["version", "{versioN}", "v{major}.{minor}.{patch}"]) def test_invalid_tag_format(fmt: str): with pytest.raises(ValueError) as err: VersionTranslator(tag_format=fmt) assert all(("tag_format" in str(err), "version" in str(err), fmt in str(err))) @pytest.mark.parametrize( "version_string", [ A_FULL_VERSION_STRING, A_PRERELEASE_VERSION_STRING, A_FULL_VERSION_STRING_WITH_BUILD_METADATA, ], ) @pytest.mark.parametrize( "tag_format, prerelease_token", [ ("v{version}", "dev"), ("v{version}", "alpha"), ("special-tagging-scheme-{version}", "rc"), ], ) def test_translator_converts_versions_with_default_formatting_rules( version_string: str, tag_format: str, prerelease_token: str ): translator = VersionTranslator( tag_format=tag_format, prerelease_token=prerelease_token ) expected_version_obj = Version.parse( version_string, prerelease_token=translator.prerelease_token ) expected_tag = tag_format.format(version=version_string) actual_version_obj = translator.from_string(version_string) actual_tag = translator.str_to_tag(version_string) # These are important assumptions for formatting into source files/tags/etc assert version_string == str(actual_version_obj) assert expected_version_obj == actual_version_obj assert expected_tag == actual_tag assert expected_version_obj == (translator.from_tag(expected_tag) or "") assert version_string == str(translator.from_tag(actual_tag) or "") python-semantic-release-10.4.1/tests/unit/semantic_release/version/test_version.py000066400000000000000000000227621506116242600305600ustar00rootroot00000000000000import operator import random import pytest from semantic_release.enums import LevelBump from semantic_release.errors import InvalidVersion from semantic_release.version.version import Version random.seed(0) EXAMPLE_VERSION_STRINGS = [ "1.0.0", "0.1.0", "0.0.1", "1.2.3", "0.2.4", "2.6.15", "13.0.0", "4.26.3", "1.0.0-rc.1", "4.26.0-beta.3", "5.3.1+local.123456", 
"9.22.0-alpha.4+build.9999", ] @pytest.mark.parametrize( "version_parts", # Major, minor, patch, prerelease_token, prerelease_revision, build_metadata [ (1, 0, 0, "rc", None, ""), (0, 1, 0, "rc", None, ""), (0, 0, 1, "rc", None, ""), (1, 2, 3, "rc", None, ""), (0, 2, 4, "rc", None, ""), (2, 6, 15, "rc", None, ""), (13, 0, 0, "rc", None, ""), (4, 26, 3, "rc", None, ""), (1, 0, 0, "rc", 1, ""), (4, 26, 3, "beta", 3, ""), (5, 3, 1, "rc", None, "local.123456"), (9, 22, 0, "alpha", 4, "build.9999"), (17, 0, 3, "custom-token", 12, ""), (17, 0, 3, "custom-token-3-6-9", 12, ""), (17, 0, 3, "custom-token", 12, "build.9999"), ], ) def test_version_parse_succeeds(version_parts): full = f"{version_parts[0]}.{version_parts[1]}.{version_parts[2]}" prerelease = f"-{version_parts[3]}.{version_parts[4]}" if version_parts[4] else "" build_metadata = f"+{version_parts[5]}" if version_parts[5] else "" version_str = f"{full}{prerelease}{build_metadata}" version = Version.parse(version_str) assert version.major == version_parts[0] assert version.minor == version_parts[1] assert version.patch == version_parts[2] assert version.prerelease_token == version_parts[3] assert version.prerelease_revision == version_parts[4] assert version.build_metadata == version_parts[5] assert str(version) == version_str @pytest.mark.parametrize( "bad_version", [ "v1.2.3", "2.3", "2.1.dev0", "2.1.4.post5", "alpha-1.2.3", "17.0.3-custom_token.12", "9", "4.1.2!-major", "%.*.?", "M2.m3.p1", ], ) def test_version_parse_fails(bad_version): with pytest.raises(InvalidVersion, match=f"{bad_version!r}"): Version.parse(bad_version) @pytest.fixture(params=EXAMPLE_VERSION_STRINGS) def a_version(request): return Version.parse(request.param) @pytest.mark.parametrize( "bad_format", ["non_unique_format", "case_sensitive_{Version}", "typo_{versione}"] ) def test_tag_format_must_contain_version_field(a_version, bad_format): with pytest.raises(ValueError, match=f"Invalid tag_format {bad_format!r}"): a_version.tag_format = 
bad_format @pytest.mark.parametrize( "tag_format", [ "v{version}", "dev-{version}", "release-_-{version}", "{version}-final", "{version}-demo-{version}", ], ) def test_change_tag_format_updates_as_tag_method(a_version, tag_format): a_version.tag_format = tag_format assert a_version.as_tag() == tag_format.format(version=str(a_version)) @pytest.mark.parametrize( "version_str, is_prerelease", [ ("1.0.0", False), ("14.33.10", False), ("2.1.1-rc.1", True), ("65.1.2-alpha.4", True), ("17.0.3-custom-token.12", True), ("17.0.3-custom-token.12+20220101000000", True), ("4.2.4+zzzz9000", False), ], ) def test_version_prerelease(version_str, is_prerelease): assert Version.parse(version_str).is_prerelease == is_prerelease def test_version_eq_succeeds(a_version): assert a_version == a_version assert a_version == str(a_version) @pytest.mark.parametrize( "lower_version, upper_version", [ ("1.0.0", "1.0.1"), ("1.0.0", "1.1.0"), ("1.0.0", "1.1.1"), ("1.0.0", "2.0.0"), ("1.0.0-rc.1", "1.0.0"), ("1.0.0-rc.1", "1.0.0-rc.2"), ("1.0.0-alpha.1", "1.0.1-beta.1"), ("1.0.1", "2.0.0-rc.1"), ], ) @pytest.mark.parametrize( "op", [ operator.lt, operator.le, operator.ne, lambda left, right: left < right, lambda left, right: left <= right, lambda left, right: left != right, ], ) def test_version_comparator_succeeds(lower_version, upper_version, op): left = Version.parse(lower_version) right = Version.parse(upper_version) # Test both on Version $op string and on Version $op Version assert op(left, right) assert op(left, str(right)) @pytest.mark.parametrize( "bad_input", [ 5, "foo-4.22", ["a", list, "of", 5, ("things",)], (1, 2, 3), {"foo": 12}, "v2.3.4", ], ) @pytest.mark.parametrize( "op", [ operator.lt, operator.le, operator.gt, operator.ge, ], ) def test_version_comparator_typeerror(bad_input, op): with pytest.raises(TypeError): op(Version.parse("1.4.5"), bad_input) def test_version_equality(a_version): assert a_version == Version.parse(str(a_version)) @pytest.mark.parametrize( "left, right", 
[("1.2.3+local.3", "1.2.3"), ("2.1.1-rc.1+build.7777", "2.1.1-rc.1")] ) def test_version_equality_when_build_metadata_lost(left, right): assert Version.parse(left) == Version.parse(right) @pytest.mark.parametrize( "lower_version, upper_version, level", [ ("1.0.0", "1.0.1", LevelBump.PATCH), ("1.0.0", "1.1.0", LevelBump.MINOR), ("1.0.0", "1.1.1", LevelBump.MINOR), ("1.0.0", "2.0.0", LevelBump.MAJOR), ("1.0.0-rc.1", "1.0.0", LevelBump.PRERELEASE_REVISION), ("1.0.1", "1.1.0-rc.1", LevelBump.MINOR), ("1.0.0-rc.1", "1.0.0-rc.2", LevelBump.PRERELEASE_REVISION), ("1.0.0-alpha.1", "1.0.1-beta.1", LevelBump.PATCH), ("1.0.1", "2.0.0-rc.1", LevelBump.MAJOR), ], ) def test_version_difference(lower_version, upper_version, level): left = Version.parse(lower_version) right = Version.parse(upper_version) assert (left - right) is level assert (right - left) is level @pytest.mark.parametrize( "bad_input", [ 5, "foo-4.22", ["a", list, "of", 5, ("things",)], (1, 2, 3), {"foo": 12}, "v2.3.4", ], ) def test_unimplemented_version_diff(bad_input): with pytest.raises(TypeError, match=r"unsupported operand type"): Version.parse("1.2.3") - bad_input @pytest.mark.parametrize( "current_version, prerelease_token, expected_prerelease_version", [ ("1.2.3", "rc", "1.2.3-rc.1"), ("1.1.1-rc.2", "rc", "1.1.1-rc.2"), ("2.0.0", "beta", "2.0.0-beta.1"), ("2.0.0-beta.1", "beta", "2.0.0-beta.1"), ], ) def test_version_to_prerelease_defaults( current_version, prerelease_token, expected_prerelease_version ): assert Version.parse(current_version).to_prerelease( token=prerelease_token ) == Version.parse(expected_prerelease_version) @pytest.mark.parametrize( "current_version, prerelease_token, revision, expected_prerelease_version", [ ("1.2.3", "rc", 3, "1.2.3-rc.3"), ("1.1.1-rc.1", "rc", 3, "1.1.1-rc.3"), ("2.0.0", "beta", None, "2.0.0-beta.1"), ("2.0.0-beta.1", "beta", 4, "2.0.0-beta.4"), ], ) def test_version_to_prerelease_with_params( current_version, prerelease_token, revision, expected_prerelease_version 
): assert Version.parse(current_version).to_prerelease( token=prerelease_token, revision=revision ) == Version.parse(expected_prerelease_version) @pytest.mark.parametrize( "current_version, expected_final_version", [ ("1.2.3-rc.1", "1.2.3"), ("1.2.3", "1.2.3"), ("1.1.1-rc.2", "1.1.1"), ("2.0.0-beta.1", "2.0.0"), ("2.27.0", "2.27.0"), ], ) def test_version_finalize_version(current_version, expected_final_version): v1 = Version.parse(current_version) assert v1.finalize_version() == Version.parse( expected_final_version, prerelease_token=v1.prerelease_token ) @pytest.mark.parametrize( "current_version, level, new_version", [ ("1.2.3", LevelBump.NO_RELEASE, "1.2.3"), ("1.2.3", LevelBump.PRERELEASE_REVISION, "1.2.3-rc.1"), ("1.2.3", LevelBump.PATCH, "1.2.4"), ("1.2.3", LevelBump.MINOR, "1.3.0"), ("1.2.3", LevelBump.MAJOR, "2.0.0"), ("1.2.3-rc.1", LevelBump.NO_RELEASE, "1.2.3-rc.1"), ("1.2.3-rc.1", LevelBump.PRERELEASE_REVISION, "1.2.3-rc.2"), ("1.2.3-rc.1", LevelBump.PATCH, "1.2.4-rc.1"), ("1.2.3-rc.1", LevelBump.MINOR, "1.3.0-rc.1"), ("1.2.3-rc.1", LevelBump.MAJOR, "2.0.0-rc.1"), ], ) def test_version_bump_succeeds(current_version, level, new_version): cv = Version.parse(current_version) nv = cv.bump(level) assert nv == Version.parse(new_version) assert cv + level == Version.parse(new_version) @pytest.mark.parametrize("bad_level", [5, "patch", {"major": True}, [1, 1, 0, 0, 1], 1]) def test_version_bump_typeerror(bad_level): with pytest.raises(TypeError): Version.parse("1.2.3").bump(bad_level) def test_version_hashable(a_version): _ = {a_version: 4} assert True # NOTE: this might be a really good first candidate for hypothesis @pytest.mark.parametrize( "major, minor, patch, prerelease_revision", [tuple(random.choice(range(1, 100)) for _ in range(4)) for _ in range(10)], ) def test_prerelease_always_less_than_full(major, minor, patch, prerelease_revision): full = Version(major, minor, patch) pre = Version(major, minor, patch, prerelease_revision=prerelease_revision) 
assert pre < full python-semantic-release-10.4.1/tests/util.py000066400000000000000000000254501506116242600210370ustar00rootroot00000000000000from __future__ import annotations import importlib.util import os import secrets import shutil import stat import string from contextlib import contextmanager, suppress from pathlib import Path from re import compile as regexp from textwrap import indent from typing import TYPE_CHECKING, Tuple from git import Git, Repo from pydantic.dataclasses import dataclass from semantic_release.changelog.context import ChangelogMode, make_changelog_context from semantic_release.changelog.release_history import ReleaseHistory from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.conventional import ConventionalCommitParser from semantic_release.commit_parser.token import ( ParsedCommit, ParsedMessageResult, ParseError, ParseResult, ) from semantic_release.enums import LevelBump from tests.const import SUCCESS_EXIT_CODE if TYPE_CHECKING: import filecmp from typing import Any, Callable, Generator, Iterable, TypeVar try: # Python 3.8 and 3.9 compatibility from typing_extensions import TypeAlias except ImportError: from typing import TypeAlias # type: ignore[attr-defined, no-redef] from unittest.mock import MagicMock from click.testing import Result as ClickInvokeResult from git import Commit from semantic_release.cli.config import RuntimeContext _R = TypeVar("_R") GitCommandWrapperType: TypeAlias = Git def get_func_qual_name(func: Callable) -> str: return str.join(".", filter(None, [func.__module__, func.__qualname__])) def assert_exit_code( exit_code: int, result: ClickInvokeResult, cli_cmd: list[str] ) -> bool: if result.exit_code != exit_code: raise AssertionError( str.join( os.linesep, [ f"{result.exit_code} != {exit_code} (actual != expected)", "", # Explain what command failed "Unexpected exit code from command:", indent(f"'{str.join(' ', cli_cmd)}'", " " * 2), "", # Add 
indentation to each line for stdout & stderr "stdout:", indent(result.stdout, " " * 2) if result.stdout.strip() else "", "stderr:", indent(result.stderr, " " * 2) if result.stderr.strip() else "", ], ) ) return True def assert_successful_exit_code(result: ClickInvokeResult, cli_cmd: list[str]) -> bool: return assert_exit_code(SUCCESS_EXIT_CODE, result, cli_cmd) def get_full_qualname(callable_obj: Callable) -> str: parts = filter( None, [ callable_obj.__module__, ( None if callable_obj.__class__.__name__ == "function" else callable_obj.__class__.__name__ ), callable_obj.__name__, ], ) return str.join(".", parts) def copy_dir_tree(src_dir: Path | str, dst_dir: Path | str) -> None: """Compatibility wrapper for shutil.copytree""" # python3.8+ shutil.copytree( src=str(src_dir), dst=str(dst_dir), dirs_exist_ok=True, ) def remove_dir_tree(directory: Path | str = ".", force: bool = False) -> None: """ Compatibility wrapper for shutil.rmtree Helpful for deleting directories with .git/* files, which usually have some read-only permissions """ def on_read_only_error(_func, path, _exc_info): os.chmod(path, stat.S_IWRITE) os.unlink(path) # Prevent error if already deleted or never existed, that is our desired state with suppress(FileNotFoundError): shutil.rmtree(str(directory), onerror=on_read_only_error if force else None) def dynamic_python_import(file_path: Path, module_name: str): spec = importlib.util.spec_from_file_location(module_name, str(file_path)) module = importlib.util.module_from_spec(spec) # type: ignore[arg-type] spec.loader.exec_module(module) # type: ignore[union-attr] return module @contextmanager def temporary_working_directory(directory: Path | str) -> Generator[None, None, None]: cwd = os.getcwd() os.chdir(str(directory)) try: yield finally: os.chdir(cwd) def shortuid(length: int = 8) -> str: alphabet = string.ascii_lowercase + string.digits return "".join(secrets.choice(alphabet) for _ in range(length)) def add_text_to_file(repo: Repo, filename: str, 
text: str | None = None): """Makes a deterministic file change for testing""" tgt_file = Path(filename).resolve().absolute() # TODO: switch to Path.is_relative_to() when 3.8 support is deprecated # if not tgt_file.is_relative_to(Path(repo.working_dir).resolve().absolute()): if Path(repo.working_dir).resolve().absolute() not in tgt_file.parents: raise ValueError( f"File {tgt_file} is not relative to the repository working directory {repo.working_dir}" ) tgt_file.parent.mkdir(parents=True, exist_ok=True) file_contents = tgt_file.read_text() if tgt_file.exists() else "" line_number = len(file_contents.splitlines()) file_contents += f"{line_number} {text or 'default text'}{os.linesep}" tgt_file.write_text(file_contents, encoding="utf-8") repo.index.add(tgt_file) def flatten_dircmp(dcmp: filecmp.dircmp) -> list[str]: return ( dcmp.diff_files + dcmp.left_only + dcmp.right_only + [ os.sep.join((directory, file)) for directory, cmp in dcmp.subdirs.items() for file in flatten_dircmp(cmp) ] ) def xdist_sort_hack(it: Iterable[_R]) -> Iterable[_R]: """ hack for pytest-xdist https://pytest-xdist.readthedocs.io/en/latest/known-limitations.html#workarounds taking an iterable of params for a pytest.mark.parametrize decorator, this ensures a deterministic sort so that xdist can always work Being able to use `pytest -nauto` is a huge speedup on testing """ return dict(enumerate(it)).values() def actions_output_to_dict(output: str) -> dict[str, str]: single_line_var_pattern = regexp(r"^(?P\w+)=(?P.*?)\r?$") multiline_var_pattern = regexp(r"^(?P\w+?)< ReleaseHistory: with Repo(str(runtime_context.repo_dir)) as git_repo: release_history = ReleaseHistory.from_git_history( git_repo, runtime_context.version_translator, runtime_context.commit_parser, runtime_context.changelog_excluded_commit_patterns, ) changelog_context = make_changelog_context( hvcs_client=runtime_context.hvcs_client, release_history=release_history, mode=ChangelogMode.INIT, prev_changelog_file=Path("CHANGELOG.md"), 
insertion_flag="", mask_initial_release=runtime_context.changelog_mask_initial_release, ) changelog_context.bind_to_environment(runtime_context.template_environment) return release_history def prepare_mocked_git_command_wrapper_type( **mocked_methods: MagicMock, ) -> type[GitCommandWrapperType]: """ Mock the specified methods of `Repo.GitCommandWrapperType` (`git.Git` by default). Initialized `MagicMock` objects are passed as keyword arguments, where the argument name is the name of the method to mock. For example, the following invocation mocks the `Repo.git.push()` command / method. Arrange: >>> from unittest.mock import MagicMock >>> from git import Repo >>> mocked_push = MagicMock() >>> cls = prepare_mocked_git_command_wrapper_type(push=mocked_push) >>> Repo.GitCommandWrapperType = cls >>> repo = Repo(".") Act: >>> repo.git.push("origin", "master") Assert: >>> mocked_push.assert_called_once() """ class MockGitCommandWrapperType(Git): def __getattr__(self, name: str) -> Any: try: return object.__getattribute__(self, f"mocked_{name}") except AttributeError: return super().__getattr__(name) for name, method in mocked_methods.items(): setattr(MockGitCommandWrapperType, f"mocked_{name}", method) return MockGitCommandWrapperType class CustomParserWithNoOpts(CommitParser[ParseResult, ParserOptions]): def parse(self, commit: Commit) -> ParsedCommit | ParseError: return ParsedCommit( bump=LevelBump.NO_RELEASE, type="", scope="", descriptions=[], breaking_descriptions=[], commit=commit, ) @dataclass class CustomParserOpts(ParserOptions): allowed_tags: Tuple[str, ...] 
= ("new", "custom") # noqa: UP006 class CustomParserWithOpts(CommitParser[ParseResult, CustomParserOpts]): parser_options = CustomParserOpts def parse(self, commit: Commit) -> ParsedCommit | ParseError: return ParsedCommit( bump=LevelBump.NO_RELEASE, type="custom", scope="", descriptions=[], breaking_descriptions=[], commit=commit, ) class IncompleteCustomParser(CommitParser): pass class CustomConventionalParserWithIgnorePatterns(ConventionalCommitParser): def parse(self, commit: Commit) -> ParsedCommit | ParseError: if not (parse_msg_result := super().parse_message(str(commit.message))): return ParseError(commit, "Unable to parse commit") return ParsedCommit.from_parsed_message_result( commit, ParsedMessageResult( **{ **parse_msg_result._asdict(), "include_in_changelog": bool( not str(commit.message).startswith("chore") ), } ), )