sentry-python-2.18.0 source tree (git commit 09946cb6246e700c4cfbdb880dda5751472249aa)

===== .coveragerc36 =====
# This is the coverage.py config for Python 3.6
# The config for newer Python versions is in pyproject.toml.

[run]
branch = true
omit =
    /tmp/*
    */tests/*
    */.venv/*

[report]
exclude_lines =
    if TYPE_CHECKING:

===== .craft.yml =====
minVersion: 0.34.1
targets:
  - name: pypi
    includeNames: /^sentry[_\-]sdk.*$/
  - name: gh-pages
  - name: registry
    sdks:
      pypi:sentry-sdk:
  - name: github
  - name: aws-lambda-layer
    # This regex that matches the version is taken from craft:
    # https://github.com/getsentry/craft/blob/8d77c38ddbe4be59f98f61b6e42952ca087d3acd/src/utils/version.ts#L11
    includeNames: /^sentry-python-serverless-\bv?(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)(?:-?([\da-z-]+(?:\.[\da-z-]+)*))?(?:\+([\da-z-]+(?:\.[\da-z-]+)*))?\b.zip$/
    layerName: SentryPythonServerlessSDK
    compatibleRuntimes:
      - name: python
        versions:
          # The number of versions must be, at most, the maximum number of
          # runtimes AWS Lambda permits for a layer (currently 15).
          # On the other hand, AWS Lambda does not support every Python runtime.
          # The supported runtimes are available in the following link:
          # https://docs.aws.amazon.com/lambda/latest/dg/lambda-python.html
          - python3.7
          - python3.8
          - python3.9
          - python3.10
          - python3.11
    license: MIT
  - name: sentry-pypi
    internalPypiRepo: getsentry/pypi
changelog: CHANGELOG.md
changelogPolicy: auto
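The `includeNames` pattern above decides which release artifacts craft uploads to the AWS Lambda layer target. A minimal sketch for sanity-checking it locally; the sample artifact names are assumptions used only to exercise the pattern, not names taken from a real release:

```python
import re

# Same pattern as `includeNames` above, without the surrounding slashes.
LAYER_ZIP = re.compile(
    r"^sentry-python-serverless-\bv?(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)"
    r"(?:-?([\da-z-]+(?:\.[\da-z-]+)*))?(?:\+([\da-z-]+(?:\.[\da-z-]+)*))?\b.zip$"
)

# Hypothetical artifact names, for illustration only.
assert LAYER_ZIP.match("sentry-python-serverless-2.18.0.zip")
assert LAYER_ZIP.match("sentry-python-serverless-v2.18.0-rc.1.zip")
assert not LAYER_ZIP.match("sentry-sdk-2.18.0.tar.gz")
```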
===== .flake8 =====
[flake8]
extend-ignore =
    # Handled by black (Whitespace before ':' -- handled by black)
    E203,
    # Handled by black (Line too long)
    E501,
    # Sometimes not possible due to execution order (Module level import is not at top of file)
    E402,
    # I don't care (Do not assign a lambda expression, use a def)
    E731,
    # does not apply to Python 2 (redundant exception types by flake8-bugbear)
    B014,
    # I don't care (Lowercase imported as non-lowercase by pep8-naming)
    N812,
    # is a worse version of and conflicts with B902 (first argument of a classmethod should be named cls)
    N804,
extend-exclude=checkouts,lol*
exclude =
    # gRPC generated files
    grpc_test_service_pb2.py
    grpc_test_service_pb2_grpc.py

===== .github/ISSUE_TEMPLATE/bug.yml =====
name: 🐞 Bug Report
description: Tell us about something that's not working the way we (probably) intend.
body:
  - type: dropdown
    id: type
    attributes:
      label: How do you use Sentry?
      options:
        - Sentry SaaS (sentry.io)
        - Self-hosted/on-premise
    validations:
      required: true
  - type: input
    id: version
    attributes:
      label: Version
      description: Which SDK version?
      placeholder: ex. 1.5.2
    validations:
      required: true
  - type: textarea
    id: repro
    attributes:
      label: Steps to Reproduce
      description: How can we see what you're seeing? Specific is terrific.
      placeholder: |-
        1. What
        2. you
        3. did.

        Extra points for also including the output of `pip freeze --all`.
    validations:
      required: true
  - type: textarea
    id: expected
    attributes:
      label: Expected Result
    validations:
      required: true
  - type: textarea
    id: actual
    attributes:
      label: Actual Result
      description: Logs? Screenshots? Yes, please.
    validations:
      required: true
  - type: markdown
    attributes:
      value: |-
        ## Thanks 🙏
    validations:
      required: false

===== .github/ISSUE_TEMPLATE/config.yml =====
blank_issues_enabled: true
contact_links:
  - name: Support Request
    url: https://sentry.io/support
    about: Use our dedicated support channel for paid accounts.

===== .github/ISSUE_TEMPLATE/feature.yml =====
name: 💡 Feature Request
description: Create a feature request for sentry-python SDK.
labels: 'enhancement'
body:
  - type: markdown
    attributes:
      value: Thanks for taking the time to file a feature request! Please fill out this form as completely as possible.
  - type: textarea
    id: problem
    attributes:
      label: Problem Statement
      description: A clear and concise description of what you want and what your use case is.
      placeholder: |-
        I want to make whirled peas, but Sentry doesn't blend.
    validations:
      required: true
  - type: textarea
    id: expected
    attributes:
      label: Solution Brainstorm
      description: We know you have bright ideas to share ... share away, friend.
      placeholder: |-
        Add a blender to Sentry.
    validations:
      required: true
  - type: markdown
    attributes:
      value: |-
        ## Thanks 🙏
        Check our [triage docs](https://open.sentry.io/triage/) for what to expect next.

===== .github/PULL_REQUEST_TEMPLATE.md =====
---

Thank you for contributing to `sentry-python`! Please add tests to validate your changes, and lint your code using `tox -e linters`.

Running the test suite on your PR might require maintainer approval. The AWS Lambda tests additionally require a maintainer to add a special label, and they will fail until this label is added.
===== .github/dependabot.yml =====
version: 2
updates:
  - package-ecosystem: pip
    directory: "/"
    schedule:
      interval: weekly
    open-pull-requests-limit: 10
    allow:
      - dependency-type: direct
      - dependency-type: indirect
    ignore:
      - dependency-name: sphinx
        versions:
          - ">= 2.4.a, < 2.5"
      - dependency-name: werkzeug
        versions:
          - "> 0.15.5, < 1"
      - dependency-name: werkzeug
        versions:
          - ">= 1.0.a, < 1.1"
      - dependency-name: mypy
        versions:
          - "0.800"
      - dependency-name: sphinx
        versions:
          - 3.4.3
  - package-ecosystem: gitsubmodule
    directory: "/"
    schedule:
      interval: weekly
    open-pull-requests-limit: 10
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: weekly
    open-pull-requests-limit: 10

===== .github/workflows/ci.yml =====
name: CI

on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:

permissions:
  contents: read

env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless

jobs:
  lint:
    name: Lint Sources
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4.2.1
      - uses: actions/setup-python@v5
        with:
          python-version: 3.12
      - run: |
          pip install tox
          tox -e linters

  check-ci-config:
    name: Check CI config
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4.2.1
      - uses: actions/setup-python@v5
        with:
          python-version: 3.12
      - run: |
          pip install jinja2
          python scripts/split-tox-gh-actions/split-tox-gh-actions.py --fail-on-changes

  build_lambda_layer:
    name: Build Package
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4.2.1
      - uses: actions/setup-python@v5
        with:
          python-version: 3.12
      - name: Setup build cache
        uses: actions/cache@v4
        id: build_cache
        with:
          path: ${{ env.CACHED_BUILD_PATHS }}
          key: ${{ env.BUILD_CACHE_KEY }}
      - name: Build Packages
        run: |
          echo "Creating directory containing Python SDK Lambda Layer"
          # This will also trigger "make dist" that creates the Python packages
          make aws-lambda-layer
      - name: Upload Python Packages
        uses: actions/upload-artifact@v4
        with:
          name: artifact-build_lambda_layer
          path: |
            dist/*
          if-no-files-found: 'error'
          # since this artifact will be merged, compression is not necessary
          compression-level: '0'

  docs:
    name: Build SDK API Doc
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4.2.1
      - uses: actions/setup-python@v5
        with:
          python-version: 3.12
      - run: |
          make apidocs
          cd docs/_build && zip -r gh-pages ./
      - uses: actions/upload-artifact@v4
        with:
          name: artifact-docs
          path: |
            docs/_build/gh-pages.zip
          if-no-files-found: 'error'
          # since this artifact will be merged, compression is not necessary
          compression-level: '0'

  merge:
    name: Create Release Artifact
    runs-on: ubuntu-latest
    needs: [build_lambda_layer, docs]
    steps:
      - uses: actions/upload-artifact/merge@v4
        with:
          # Craft expects release assets from github to be a single artifact named after the sha.
          name: ${{ github.sha }}
          pattern: artifact-*
          delete-merged: true

===== .github/workflows/codeql-analysis.yml =====
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
    branches:
      - master
      - sentry-sdk-2.0
  pull_request:
  schedule:
    - cron: '18 18 * * 3'

# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

permissions:
  contents: read

jobs:
  analyze:
    permissions:
      actions: read  # for github/codeql-action/init to get workflow details
      contents: read  # for actions/checkout to fetch code
      security-events: write  # for github/codeql-action/autobuild to send a status report
    name: Analyze
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        language: [ 'python' ]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
        # Learn more:
        # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4.2.1

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.
          # queries: ./path/to/local/query, your-org/your-repo/queries@main

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        uses: github/codeql-action/autobuild@v3

      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions

      # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
      #    and modify them (or add more) to build your code if your project
      #    uses a compiled language

      #- run: |
      #   make bootstrap
      #   make release

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v3

===== .github/workflows/enforce-license-compliance.yml =====
name: Enforce License Compliance

on:
  push:
    branches:
      - master
      - main
      - release/*
      - sentry-sdk-2.0
  pull_request:

# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}

jobs:
  enforce-license-compliance:
    runs-on: ubuntu-latest
    steps:
      - name: 'Enforce License Compliance'
        uses: getsentry/action-enforce-license-compliance@main
        with:
          fossa_api_key: ${{ secrets.FOSSA_API_KEY }}
===== .github/workflows/release.yml =====
name: Release

on:
  workflow_dispatch:
    inputs:
      version:
        description: Version to release
        required: true
      force:
        description: Force a release even when there are release-blockers (optional)
        required: false
      merge_target:
        description: Target branch to merge into. Uses the default branch as a fallback (optional)
        required: false

jobs:
  release:
    runs-on: ubuntu-latest
    name: "Release a new version"
    steps:
      - uses: actions/checkout@v4.2.1
        with:
          token: ${{ secrets.GH_RELEASE_PAT }}
          fetch-depth: 0
      - name: Prepare release
        uses: getsentry/action-prepare-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GH_RELEASE_PAT }}
        with:
          version: ${{ github.event.inputs.version }}
          force: ${{ github.event.inputs.force }}
          merge_target: ${{ github.event.inputs.merge_target }}

===== .github/workflows/scripts/trigger_tests_on_label.py =====
#!/usr/bin/env python3
import argparse
import json
import os
from urllib.parse import quote
from urllib.request import Request, urlopen

LABEL = "Trigger: tests using secrets"


def _has_write(repo_id: int, username: str, *, token: str) -> bool:
    req = Request(
        f"https://api.github.com/repositories/{repo_id}/collaborators/{username}/permission",
        headers={"Authorization": f"token {token}"},
    )
    contents = json.load(urlopen(req, timeout=10))
    return contents["permission"] in {"admin", "write"}


def _remove_label(repo_id: int, pr: int, label: str, *, token: str) -> None:
    quoted_label = quote(label)
    req = Request(
        f"https://api.github.com/repositories/{repo_id}/issues/{pr}/labels/{quoted_label}",
        method="DELETE",
        headers={"Authorization": f"token {token}"},
    )
    urlopen(req)


def main() -> int:
    parser = argparse.ArgumentParser()
    parser.add_argument("--repo-id", type=int, required=True)
    parser.add_argument("--pr", type=int, required=True)
    parser.add_argument("--event", required=True)
    parser.add_argument("--username", required=True)
    parser.add_argument("--label-names", type=json.loads, required=True)
    args = parser.parse_args()

    token = os.environ["GITHUB_TOKEN"]

    write_permission = _has_write(args.repo_id, args.username, token=token)

    if (
        not write_permission
        # `reopened` is included here due to close => push => reopen
        and args.event in {"synchronize", "reopened"}
        and LABEL in args.label_names
    ):
        print(f"Invalidating label [{LABEL}] due to code change...")
        _remove_label(args.repo_id, args.pr, LABEL, token=token)
        args.label_names.remove(LABEL)

    if write_permission or LABEL in args.label_names:
        print("Permissions passed!")
        print(f"- has write permission: {write_permission}")
        print(f"- has [{LABEL}] label: {LABEL in args.label_names}")
        return 0
    else:
        print("Permissions failed!")
        print(f"- has write permission: {write_permission}")
        print(f"- has [{LABEL}] label: {LABEL in args.label_names}")
        print(f"- args.label_names: {args.label_names}")
        print(
            f"Please have a collaborator add the [{LABEL}] label once they "
            f"have reviewed the code to trigger tests."
        )
        return 1


if __name__ == "__main__":
    raise SystemExit(main())
===== .github/workflows/test-integrations-ai.yml =====
# Do not edit this file. This file is generated automatically by executing
# python scripts/split-tox-gh-actions/split-tox-gh-actions.py
name: Test AI
on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:
# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true
permissions:
  contents: read
env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless
jobs:
  test-ai-latest:
    name: AI (latest)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.7","3.9","3.11","3.12","3.13"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4.2.1
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          allow-prereleases: true
      - name: Setup Test Env
        run: |
          pip install "coverage[toml]" tox
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test anthropic latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-anthropic-latest"
      - name: Test cohere latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-cohere-latest"
      - name: Test langchain latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-langchain-latest"
      - name: Test openai latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-openai-latest"
      - name: Test huggingface_hub latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-huggingface_hub-latest"
      - name: Generate coverage XML (Python 3.6)
        if: ${{ !cancelled() && matrix.python-version == '3.6' }}
        run: |
          export COVERAGE_RCFILE=.coveragerc36
          coverage combine .coverage-sentry-*
          coverage xml --ignore-errors
      - name: Generate coverage XML
        if: ${{ !cancelled() && matrix.python-version != '3.6' }}
        run: |
          coverage combine .coverage-sentry-*
          coverage xml
      - name: Upload coverage to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/codecov-action@v4.6.0
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
          # make sure no plugins alter our coverage reports
          plugin: noop
          verbose: true
      - name: Upload test results to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/test-results-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: .junitxml
          verbose: true
  test-ai-pinned:
    name: AI (pinned)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.7","3.9","3.11","3.12","3.13"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4.2.1
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          allow-prereleases: true
      - name: Setup Test Env
        run: |
          pip install "coverage[toml]" tox
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test anthropic pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-anthropic"
      - name: Test cohere pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-cohere"
      - name: Test langchain pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-langchain"
      - name: Test openai pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-openai"
      - name: Test huggingface_hub pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-huggingface_hub"
      - name: Generate coverage XML (Python 3.6)
        if: ${{ !cancelled() && matrix.python-version == '3.6' }}
        run: |
          export COVERAGE_RCFILE=.coveragerc36
          coverage combine .coverage-sentry-*
          coverage xml --ignore-errors
      - name: Generate coverage XML
        if: ${{ !cancelled() && matrix.python-version != '3.6' }}
        run: |
          coverage combine .coverage-sentry-*
          coverage xml
      - name: Upload coverage to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/codecov-action@v4.6.0
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
          # make sure no plugins alter our coverage reports
          plugin: noop
          verbose: true
      - name: Upload test results to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/test-results-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: .junitxml
          verbose: true
  check_required_tests:
    name: All pinned AI tests passed
    needs: test-ai-pinned
    # Always run this, even if a dependent job failed
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Check for failures
        if: contains(needs.test-ai-pinned.result, 'failure') || contains(needs.test-ai-pinned.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
===== .github/workflows/test-integrations-aws-lambda.yml =====
# Do not edit this file. This file is generated automatically by executing
# python scripts/split-tox-gh-actions/split-tox-gh-actions.py
name: Test AWS Lambda
on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  # XXX: We are using `pull_request_target` instead of `pull_request` because we want
  # this to run on forks with access to the secrets necessary to run the test suite.
  # Prefer to use `pull_request` when possible.
  pull_request_target:
    types: [labeled, opened, reopened, synchronize]
# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true
permissions:
  contents: read
  # `write` is needed to remove the `Trigger: tests using secrets` label
  pull-requests: write
env:
  SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID: ${{ secrets.SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID }}
  SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY: ${{ secrets.SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY }}
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless
jobs:
  check-permissions:
    name: permissions check
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@v4.2.1
        with:
          persist-credentials: false
      - name: Check permissions on PR
        if: github.event_name == 'pull_request_target'
        run: |
          python3 -uS .github/workflows/scripts/trigger_tests_on_label.py \
            --repo-id ${{ github.event.repository.id }} \
            --pr ${{ github.event.number }} \
            --event ${{ github.event.action }} \
            --username "$ARG_USERNAME" \
            --label-names "$ARG_LABEL_NAMES"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          # these can contain special characters
          ARG_USERNAME: ${{ github.event.pull_request.user.login }}
          ARG_LABEL_NAMES: ${{ toJSON(github.event.pull_request.labels.*.name) }}
      - name: Check permissions on repo branch
        if: github.event_name == 'push'
        run: true
  test-aws_lambda-pinned:
    name: AWS Lambda (pinned)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.9"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    needs: check-permissions
    steps:
      - uses: actions/checkout@v4.2.1
        with:
          ref: ${{ github.event.pull_request.head.sha || github.ref }}
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          allow-prereleases: true
      - name: Setup Test Env
        run: |
          pip install "coverage[toml]" tox
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test aws_lambda pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-aws_lambda"
      - name: Generate coverage XML (Python 3.6)
        if: ${{ !cancelled() && matrix.python-version == '3.6' }}
        run: |
          export COVERAGE_RCFILE=.coveragerc36
          coverage combine .coverage-sentry-*
          coverage xml --ignore-errors
      - name: Generate coverage XML
        if: ${{ !cancelled() && matrix.python-version != '3.6' }}
        run: |
          coverage combine .coverage-sentry-*
          coverage xml
      - name: Upload coverage to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/codecov-action@v4.6.0
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
          # make sure no plugins alter our coverage reports
          plugin: noop
          verbose: true
      - name: Upload test results to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/test-results-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: .junitxml
          verbose: true
  check_required_tests:
    name: All pinned AWS Lambda tests passed
    needs: test-aws_lambda-pinned
    # Always run this, even if a dependent job failed
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Check for failures
        if: contains(needs.test-aws_lambda-pinned.result, 'failure') || contains(needs.test-aws_lambda-pinned.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
===== .github/workflows/test-integrations-cloud-computing.yml =====
# Do not edit this file. This file is generated automatically by executing
# python scripts/split-tox-gh-actions/split-tox-gh-actions.py
name: Test Cloud Computing
on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:
# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true
permissions:
  contents: read
env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless
jobs:
  test-cloud_computing-latest:
    name: Cloud Computing (latest)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.8","3.11","3.12","3.13"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4.2.1
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          allow-prereleases: true
      - name: Setup Test Env
        run: |
          pip install "coverage[toml]" tox
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test boto3 latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-boto3-latest"
      - name: Test chalice latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-chalice-latest"
      - name: Test cloud_resource_context latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-cloud_resource_context-latest"
      - name: Test gcp latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-gcp-latest"
      - name: Generate coverage XML (Python 3.6)
        if: ${{ !cancelled() && matrix.python-version == '3.6' }}
        run: |
          export COVERAGE_RCFILE=.coveragerc36
          coverage combine .coverage-sentry-*
          coverage xml --ignore-errors
      - name: Generate coverage XML
        if: ${{ !cancelled() && matrix.python-version != '3.6' }}
        run: |
          coverage combine .coverage-sentry-*
          coverage xml
      - name: Upload coverage to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/codecov-action@v4.6.0
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
          # make sure no plugins alter our coverage reports
          plugin: noop
          verbose: true
      - name: Upload test results to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/test-results-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: .junitxml
          verbose: true
  test-cloud_computing-pinned:
    name: Cloud Computing (pinned)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.6","3.7","3.9","3.11","3.12","3.13"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4.2.1
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          allow-prereleases: true
      - name: Setup Test Env
        run: |
          pip install "coverage[toml]" tox
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test boto3 pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-boto3"
      - name: Test chalice pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-chalice"
      - name: Test cloud_resource_context pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-cloud_resource_context"
      - name: Test gcp pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-gcp"
      - name: Generate coverage XML (Python 3.6)
        if: ${{ !cancelled() && matrix.python-version == '3.6' }}
        run: |
          export COVERAGE_RCFILE=.coveragerc36
          coverage combine .coverage-sentry-*
          coverage xml --ignore-errors
      - name: Generate coverage XML
        if: ${{ !cancelled() && matrix.python-version != '3.6' }}
        run: |
          coverage combine .coverage-sentry-*
          coverage xml
      - name: Upload coverage to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/codecov-action@v4.6.0
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
          # make sure no plugins alter our coverage reports
          plugin: noop
          verbose: true
      - name: Upload test results to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/test-results-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: .junitxml
          verbose: true
  check_required_tests:
    name: All pinned Cloud Computing tests passed
    needs: test-cloud_computing-pinned
    # Always run this, even if a dependent job failed
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Check for failures
        if: contains(needs.test-cloud_computing-pinned.result, 'failure') || contains(needs.test-cloud_computing-pinned.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
===== .github/workflows/test-integrations-common.yml =====
# Do not edit this file. This file is generated automatically by executing
# python scripts/split-tox-gh-actions/split-tox-gh-actions.py
name: Test Common
on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:
# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true
permissions:
  contents: read
env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless
jobs:
  test-common-pinned:
    name: Common (pinned)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12","3.13"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4.2.1
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          allow-prereleases: true
      - name: Setup Test Env
        run: |
          pip install "coverage[toml]" tox
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test common pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-common"
      - name: Generate coverage XML (Python 3.6)
        if: ${{ !cancelled() && matrix.python-version == '3.6' }}
        run: |
          export COVERAGE_RCFILE=.coveragerc36
          coverage combine .coverage-sentry-*
          coverage xml --ignore-errors
      - name: Generate coverage XML
        if: ${{ !cancelled() && matrix.python-version != '3.6' }}
        run: |
          coverage combine .coverage-sentry-*
          coverage xml
      - name: Upload coverage to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/codecov-action@v4.6.0
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
          # make sure no plugins alter our coverage reports
          plugin: noop
          verbose: true
      - name: Upload test results to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/test-results-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: .junitxml
          verbose: true
  check_required_tests:
    name: All pinned Common tests passed
    needs: test-common-pinned
    # Always run this, even if a dependent job failed
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Check for failures
        if: contains(needs.test-common-pinned.result, 'failure') || contains(needs.test-common-pinned.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
===== .github/workflows/test-integrations-data-processing.yml =====
# Do not edit this file. This file is generated automatically by executing
# python scripts/split-tox-gh-actions/split-tox-gh-actions.py
name: Test Data Processing
on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:
# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true
permissions:
  contents: read
env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless
jobs:
  test-data_processing-latest:
    name: Data Processing (latest)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.6","3.7","3.8","3.10","3.11","3.12","3.13"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4.2.1
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          allow-prereleases: true
      - name: Start Redis
        uses: supercharge/redis-github-action@1.8.0
      - name: Setup Test Env
        run: |
          pip install "coverage[toml]" tox
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test arq latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-arq-latest"
      - name: Test beam latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-beam-latest"
      - name: Test celery latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-celery-latest"
      - name: Test dramatiq latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-dramatiq-latest"
      - name: Test huey latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-huey-latest"
      - name: Test ray latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-ray-latest"
      - name: Test rq latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-rq-latest"
      - name: Test spark latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-spark-latest"
      - name: Generate coverage XML (Python 3.6)
        if: ${{ !cancelled() && matrix.python-version == '3.6' }}
        run: |
          export COVERAGE_RCFILE=.coveragerc36
          coverage combine .coverage-sentry-*
          coverage xml --ignore-errors
      - name: Generate coverage XML
        if: ${{ !cancelled() && matrix.python-version != '3.6' }}
        run: |
          coverage combine .coverage-sentry-*
          coverage xml
      - name: Upload coverage to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/codecov-action@v4.6.0
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
          # make sure no plugins alter our coverage reports
          plugin: noop
          verbose: true
      - name: Upload test results to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/test-results-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: .junitxml
          verbose: true
  test-data_processing-pinned:
    name: Data Processing (pinned)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4.2.1
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          allow-prereleases: true
      - name: Start Redis
        uses: supercharge/redis-github-action@1.8.0
      - name: Setup Test Env
        run: |
          pip install "coverage[toml]" tox
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test arq pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-arq"
      - name: Test beam pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-beam"
      - name: Test celery pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-celery"
      - name: Test dramatiq pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-dramatiq"
      - name: Test huey pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-huey"
      - name: Test ray pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-ray"
      - name: Test rq pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-rq"
      - name: Test spark pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-spark"
      - name: Generate coverage XML (Python 3.6)
        if: ${{ !cancelled() && matrix.python-version == '3.6' }}
        run: |
          export COVERAGE_RCFILE=.coveragerc36
          coverage combine .coverage-sentry-*
          coverage xml --ignore-errors
      - name: Generate coverage XML
        if: ${{ !cancelled() && matrix.python-version != '3.6' }}
        run: |
          coverage combine .coverage-sentry-*
          coverage xml
      - name: Upload coverage to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/codecov-action@v4.6.0
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
          # make sure no plugins alter our coverage reports
          plugin: noop
          verbose: true
      - name: Upload test results to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/test-results-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: .junitxml
          verbose: true
  check_required_tests:
    name: All pinned Data Processing tests passed
    needs: test-data_processing-pinned
    # Always run this, even if a dependent job failed
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Check for failures
        if: contains(needs.test-data_processing-pinned.result, 'failure') || contains(needs.test-data_processing-pinned.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
===== .github/workflows/test-integrations-databases.yml =====
# Do not edit this file. This file is generated automatically by executing
# python scripts/split-tox-gh-actions/split-tox-gh-actions.py
name: Test Databases
on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:
# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true
permissions:
  contents: read
env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless
jobs:
  test-databases-latest:
    name: Databases (latest)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.7","3.8","3.11","3.12","3.13"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    services:
      postgres:
        image: postgres
        env:
          POSTGRES_PASSWORD: sentry
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        # Maps tcp port 5432 on service container to the host
        ports:
          - 5432:5432
    env:
      SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost
      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
    steps:
      - uses: actions/checkout@v4.2.1
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          allow-prereleases: true
      - uses: getsentry/action-clickhouse-in-ci@v1
      - name: Setup Test Env
        run: |
          pip install "coverage[toml]" tox
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test asyncpg latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-asyncpg-latest"
      - name: Test clickhouse_driver latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-clickhouse_driver-latest"
      - name: Test pymongo latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-pymongo-latest"
      - name: Test redis latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-redis-latest"
      - name: Test redis_py_cluster_legacy latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-redis_py_cluster_legacy-latest"
      - name: Test sqlalchemy latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-sqlalchemy-latest"
      - name: Generate coverage XML (Python 3.6)
        if: ${{ !cancelled() && matrix.python-version == '3.6' }}
        run: |
          export COVERAGE_RCFILE=.coveragerc36
          coverage combine .coverage-sentry-*
          coverage xml --ignore-errors
      - name: Generate coverage XML
        if: ${{ !cancelled() && matrix.python-version != '3.6' }}
        run: |
          coverage combine .coverage-sentry-*
          coverage xml
      - name: Upload coverage to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/codecov-action@v4.6.0
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
          # make sure no plugins alter our coverage reports
          plugin: noop
          verbose: true
      - name: Upload test results to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/test-results-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: .junitxml
          verbose: true
  test-databases-pinned:
    name: Databases (pinned)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    services:
      postgres:
        image: postgres
        env:
          POSTGRES_PASSWORD: sentry
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        # Maps tcp port 5432 on service container to the host
        ports:
          - 5432:5432
    env:
      SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost
      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
    steps:
      - uses: actions/checkout@v4.2.1
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          allow-prereleases: true
      - uses: getsentry/action-clickhouse-in-ci@v1
      - name: Setup Test Env
        run: |
          pip install "coverage[toml]" tox
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test asyncpg pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-asyncpg"
      - name: Test clickhouse_driver pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-clickhouse_driver"
      - name: Test pymongo pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-pymongo"
      - name: Test redis pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-redis"
      - name: Test redis_py_cluster_legacy pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-redis_py_cluster_legacy"
      - name: Test sqlalchemy pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-sqlalchemy"
      - name: Generate coverage XML (Python 3.6)
        if: ${{ !cancelled() && matrix.python-version == '3.6' }}
        run: |
          export COVERAGE_RCFILE=.coveragerc36
          coverage combine .coverage-sentry-*
          coverage xml --ignore-errors
      - name: Generate coverage XML
        if: ${{ !cancelled() && matrix.python-version != '3.6' }}
        run: |
          coverage combine .coverage-sentry-*
          coverage xml
      - name: Upload coverage to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/codecov-action@v4.6.0
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
          # make sure no plugins alter our coverage reports
          plugin: noop
          verbose: true
      - name: Upload test results to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/test-results-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: .junitxml
          verbose: true
  check_required_tests:
    name: All pinned Databases tests passed
    needs: test-databases-pinned
    # Always run this, even if a dependent job failed
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Check for failures
        if: contains(needs.test-databases-pinned.result, 'failure') || contains(needs.test-databases-pinned.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
===== .github/workflows/test-integrations-graphql.yml =====
# Do not edit this file. This file is generated automatically by executing
# python scripts/split-tox-gh-actions/split-tox-gh-actions.py
name: Test GraphQL
on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:
# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true
permissions:
  contents: read
env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless
jobs:
  test-graphql-latest:
    name: GraphQL (latest)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.7","3.8","3.12","3.13"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4.2.1
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          allow-prereleases: true
      - name: Setup Test Env
        run: |
          pip install "coverage[toml]" tox
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test ariadne latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-ariadne-latest"
      - name: Test gql latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-gql-latest"
      - name: Test graphene latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-graphene-latest"
      - name: Test strawberry latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-strawberry-latest"
      - name: Generate coverage XML (Python 3.6)
        if: ${{ !cancelled() && matrix.python-version == '3.6' }}
        run: |
          export COVERAGE_RCFILE=.coveragerc36
          coverage combine .coverage-sentry-*
          coverage xml --ignore-errors
      - name: Generate coverage XML
        if: ${{ !cancelled() && matrix.python-version != '3.6' }}
        run: |
          coverage combine .coverage-sentry-*
          coverage xml
      - name: Upload coverage to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/codecov-action@v4.6.0
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
          # make sure no plugins alter our coverage reports
          plugin: noop
          verbose: true
      - name: Upload test results to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/test-results-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: .junitxml
          verbose: true
  test-graphql-pinned:
    name: GraphQL (pinned)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.7","3.8","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4.2.1
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          allow-prereleases: true
      - name: Setup Test Env
        run: |
          pip install "coverage[toml]" tox
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test ariadne pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-ariadne"
      - name: Test gql pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-gql"
      - name: Test graphene pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-graphene"
      - name: Test strawberry pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-strawberry"
      - name: Generate coverage XML (Python 3.6)
        if: ${{ !cancelled() && matrix.python-version == '3.6' }}
        run: |
          export COVERAGE_RCFILE=.coveragerc36
          coverage combine .coverage-sentry-*
          coverage xml --ignore-errors
      - name: Generate coverage XML
        if: ${{ !cancelled() && matrix.python-version != '3.6' }}
        run: |
          coverage combine .coverage-sentry-*
          coverage xml
      - name: Upload coverage to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/codecov-action@v4.6.0
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
          # make sure no plugins alter our coverage reports
          plugin: noop
          verbose: true
      - name: Upload test results to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/test-results-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: .junitxml
          verbose: true
  check_required_tests:
    name: All pinned GraphQL tests passed
    needs: test-graphql-pinned
    # Always run this, even if a dependent job failed
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Check for failures
        if: contains(needs.test-graphql-pinned.result, 'failure') || contains(needs.test-graphql-pinned.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
===== .github/workflows/test-integrations-miscellaneous.yml =====
# Do not edit this file. This file is generated automatically by executing
# python scripts/split-tox-gh-actions/split-tox-gh-actions.py
name: Test Miscellaneous
on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:
# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true
permissions:
  contents: read
env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless
jobs:
  test-miscellaneous-latest:
    name: Miscellaneous (latest)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.6","3.8","3.12","3.13"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4.2.1
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          allow-prereleases: true
      - name: Setup Test Env
        run: |
          pip install "coverage[toml]" tox
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test launchdarkly latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-launchdarkly-latest"
      - name: Test loguru latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-loguru-latest"
      - name: Test openfeature latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-openfeature-latest"
      - name: Test opentelemetry latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-opentelemetry-latest"
      - name: Test potel latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-potel-latest"
      - name: Test pure_eval latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-pure_eval-latest"
      - name: Test trytond latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-trytond-latest"
      - name: Generate coverage XML (Python 3.6)
        if: ${{ !cancelled() && matrix.python-version == '3.6' }}
        run: |
          export COVERAGE_RCFILE=.coveragerc36
          coverage combine .coverage-sentry-*
          coverage xml --ignore-errors
      - name: Generate coverage XML
        if: ${{ !cancelled() && matrix.python-version != '3.6' }}
        run: |
          coverage combine .coverage-sentry-*
          coverage xml
      - name: Upload coverage to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/codecov-action@v4.6.0
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
          # make sure no plugins alter our coverage reports
          plugin: noop
          verbose: true
      - name: Upload test results to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/test-results-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: .junitxml
          verbose: true
  test-miscellaneous-pinned:
    name: Miscellaneous (pinned)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12","3.13"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4.2.1
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          allow-prereleases: true
      - name: Setup Test Env
        run: |
          pip install "coverage[toml]" tox
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test launchdarkly pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-launchdarkly"
      - name: Test loguru pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-loguru"
      - name: Test openfeature pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-openfeature"
      - name: Test opentelemetry pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-opentelemetry"
      - name: Test potel pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-potel"
      - name: Test pure_eval pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-pure_eval"
      - name: Test trytond pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-trytond"
      - name: Generate coverage XML (Python 3.6)
        if: ${{ !cancelled() && matrix.python-version == '3.6' }}
        run: |
          export COVERAGE_RCFILE=.coveragerc36
          coverage combine .coverage-sentry-*
          coverage xml --ignore-errors
      - name: Generate coverage XML
        if: ${{ !cancelled() && matrix.python-version != '3.6' }}
        run: |
          coverage combine .coverage-sentry-*
          coverage xml
      - name: Upload coverage to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/codecov-action@v4.6.0
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
          # make sure no plugins alter our coverage reports
          plugin: noop
          verbose: true
      - name: Upload test results to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/test-results-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: .junitxml
          verbose: true
  check_required_tests:
    name: All pinned Miscellaneous tests passed
    needs: test-miscellaneous-pinned
    # Always run this, even if a dependent job failed
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Check for failures
        if: contains(needs.test-miscellaneous-pinned.result, 'failure') || contains(needs.test-miscellaneous-pinned.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-20.04] steps: - uses: actions/checkout@v4.2.1 - uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} allow-prereleases: true - name: Setup Test Env run: | pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase - name: Test gevent pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-gevent" - name: Test grpc pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-grpc" - name: Test httpx pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-httpx" - name: Test requests pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-requests" - name: Generate coverage XML (Python 3.6) if: ${{ !cancelled() && matrix.python-version == '3.6' }} run: | export COVERAGE_RCFILE=.coveragerc36 coverage combine .coverage-sentry-* coverage xml --ignore-errors - name: Generate coverage XML if: ${{ !cancelled() && matrix.python-version != '3.6' }} run: | coverage combine .coverage-sentry-* coverage xml - name: Upload coverage to Codecov if: ${{ !cancelled() }} uses: codecov/codecov-action@v4.6.0 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml # make sure no plugins alter our coverage reports plugin: noop verbose: true - name: Upload test results to Codecov if: ${{ !cancelled() }} uses: codecov/test-results-action@v1 with: token: ${{ secrets.CODECOV_TOKEN }} files: .junitxml verbose: true check_required_tests: name: All pinned Networking tests passed needs: test-networking-pinned # Always run this, even if a dependent job failed if: always() runs-on: ubuntu-20.04 steps: - name: Check for failures if: contains(needs.test-networking-pinned.result, 'failure') || contains(needs.test-networking-pinned.result, 'skipped') run: | echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1 sentry-python-2.18.0/.github/workflows/test-integrations-web-frameworks-1.yml000066400000000000000000000166001471214654000273460ustar00rootroot00000000000000# Do not edit this file. This file is generated automatically by executing # python scripts/split-tox-gh-actions/split-tox-gh-actions.py name: Test Web Frameworks 1 on: push: branches: - master - release/** - sentry-sdk-2.0 pull_request: # Cancel in progress workflows on pull_requests. 
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true permissions: contents: read env: BUILD_CACHE_KEY: ${{ github.sha }} CACHED_BUILD_PATHS: | ${{ github.workspace }}/dist-serverless jobs: test-web_frameworks_1-latest: name: Web Frameworks 1 (latest) timeout-minutes: 30 runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: python-version: ["3.8","3.10","3.12","3.13"] # python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-20.04] services: postgres: image: postgres env: POSTGRES_PASSWORD: sentry # Set health checks to wait until postgres has started options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 # Maps tcp port 5432 on service container to the host ports: - 5432:5432 env: SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost SENTRY_PYTHON_TEST_POSTGRES_USER: postgres SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry steps: - uses: actions/checkout@v4.2.1 - uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} allow-prereleases: true - name: Setup Test Env run: | pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase - name: Test django latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-django-latest" - name: Test flask latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-flask-latest" - name: Test starlette latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-starlette-latest" - name: Test fastapi latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-fastapi-latest" - name: Generate coverage XML (Python 3.6) if: ${{ !cancelled() && matrix.python-version == '3.6' }} run: | export COVERAGE_RCFILE=.coveragerc36 coverage combine .coverage-sentry-* coverage xml --ignore-errors - name: Generate coverage XML if: ${{ !cancelled() && matrix.python-version != '3.6' }} run: | coverage combine .coverage-sentry-* coverage xml - name: Upload coverage to Codecov if: ${{ !cancelled() }} uses: codecov/codecov-action@v4.6.0 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml # make sure no plugins alter our coverage reports plugin: noop verbose: true - name: Upload test results to Codecov if: ${{ !cancelled() }} uses: codecov/test-results-action@v1 with: token: ${{ secrets.CODECOV_TOKEN }} files: .junitxml verbose: true test-web_frameworks_1-pinned: name: Web Frameworks 1 (pinned) timeout-minutes: 30 runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12"] # python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-20.04] services: postgres: image: postgres env: POSTGRES_PASSWORD: sentry # Set health checks to wait until postgres has started options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 # Maps tcp port 5432 on service 
container to the host ports: - 5432:5432 env: SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost SENTRY_PYTHON_TEST_POSTGRES_USER: postgres SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry steps: - uses: actions/checkout@v4.2.1 - uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} allow-prereleases: true - name: Setup Test Env run: | pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase - name: Test django pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-django" - name: Test flask pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-flask" - name: Test starlette pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-starlette" - name: Test fastapi pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-fastapi" - name: Generate coverage XML (Python 3.6) if: ${{ !cancelled() && matrix.python-version == '3.6' }} run: | export COVERAGE_RCFILE=.coveragerc36 coverage combine .coverage-sentry-* coverage xml --ignore-errors - name: Generate coverage XML if: ${{ !cancelled() && matrix.python-version != '3.6' }} run: | coverage combine .coverage-sentry-* coverage xml - name: Upload coverage to Codecov if: ${{ !cancelled() }} uses: codecov/codecov-action@v4.6.0 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml # make sure no plugins alter our coverage reports plugin: noop verbose: true - name: Upload test results to Codecov if: ${{ !cancelled() }} uses: codecov/test-results-action@v1 with: token: ${{ secrets.CODECOV_TOKEN }} files: .junitxml verbose: true check_required_tests: name: All pinned Web Frameworks 1 tests passed needs: test-web_frameworks_1-pinned # Always run this, even if a dependent job failed if: always() runs-on: ubuntu-20.04 steps: - name: Check for failures if: contains(needs.test-web_frameworks_1-pinned.result, 'failure') || contains(needs.test-web_frameworks_1-pinned.result, 'skipped') run: | echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1 sentry-python-2.18.0/.github/workflows/test-integrations-web-frameworks-2.yml000066400000000000000000000206421471214654000273500ustar00rootroot00000000000000# Do not edit this file. This file is generated automatically by executing # python scripts/split-tox-gh-actions/split-tox-gh-actions.py name: Test Web Frameworks 2 on: push: branches: - master - release/** - sentry-sdk-2.0 pull_request: # Cancel in progress workflows on pull_requests. 
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true permissions: contents: read env: BUILD_CACHE_KEY: ${{ github.sha }} CACHED_BUILD_PATHS: | ${{ github.workspace }}/dist-serverless jobs: test-web_frameworks_2-latest: name: Web Frameworks 2 (latest) timeout-minutes: 30 runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: python-version: ["3.6","3.7","3.8","3.11","3.12","3.13"] # python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-20.04] steps: - uses: actions/checkout@v4.2.1 - uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} allow-prereleases: true - name: Setup Test Env run: | pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase - name: Test aiohttp latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-aiohttp-latest" - name: Test asgi latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-asgi-latest" - name: Test bottle latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-bottle-latest" - name: Test falcon latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-falcon-latest" - name: Test litestar latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-litestar-latest" - name: Test pyramid latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-pyramid-latest" - name: Test quart latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-quart-latest" - name: Test sanic latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-sanic-latest" - name: Test starlite latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-starlite-latest" - name: Test tornado latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-tornado-latest" - name: Generate coverage XML (Python 3.6) if: ${{ !cancelled() && matrix.python-version == '3.6' }} run: | export COVERAGE_RCFILE=.coveragerc36 coverage combine .coverage-sentry-* coverage xml --ignore-errors - name: Generate coverage XML if: ${{ !cancelled() && matrix.python-version != '3.6' }} run: | coverage combine .coverage-sentry-* coverage xml - name: Upload coverage to Codecov if: ${{ !cancelled() }} uses: codecov/codecov-action@v4.6.0 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml # make sure no plugins alter our coverage reports plugin: noop verbose: true - name: Upload test results to Codecov if: ${{ !cancelled() }} uses: codecov/test-results-action@v1 with: token: ${{ secrets.CODECOV_TOKEN }} files: .junitxml verbose: true test-web_frameworks_2-pinned: name: Web Frameworks 2 (pinned) timeout-minutes: 30 runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: python-version: ["3.6","3.7","3.8","3.9","3.11","3.12","3.13"] # python3.6 reached EOL and is no longer being supported on # new versions of hosted 
runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-20.04] steps: - uses: actions/checkout@v4.2.1 - uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} allow-prereleases: true - name: Setup Test Env run: | pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase - name: Test aiohttp pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-aiohttp" - name: Test asgi pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-asgi" - name: Test bottle pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-bottle" - name: Test falcon pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-falcon" - name: Test litestar pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-litestar" - name: Test pyramid pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-pyramid" - name: Test quart pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-quart" - name: Test sanic pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-sanic" - name: Test starlite pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-starlite" - name: Test tornado pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-tornado" - name: Generate coverage XML (Python 3.6) if: ${{ !cancelled() && matrix.python-version == '3.6' }} run: | export COVERAGE_RCFILE=.coveragerc36 coverage combine .coverage-sentry-* coverage xml --ignore-errors - name: Generate coverage XML if: ${{ !cancelled() && matrix.python-version != '3.6' }} run: | coverage combine .coverage-sentry-* coverage xml - name: Upload coverage to Codecov if: ${{ !cancelled() }} uses: codecov/codecov-action@v4.6.0 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml # make sure no plugins alter our coverage reports plugin: noop verbose: true - name: Upload test results to Codecov if: ${{ !cancelled() }} uses: codecov/test-results-action@v1 with: token: ${{ secrets.CODECOV_TOKEN }} files: .junitxml verbose: true check_required_tests: name: All pinned Web Frameworks 2 tests passed needs: test-web_frameworks_2-pinned # Always run this, even if a dependent job failed if: always() runs-on: ubuntu-20.04 steps: - name: Check for failures if: contains(needs.test-web_frameworks_2-pinned.result, 'failure') || contains(needs.test-web_frameworks_2-pinned.result, 'skipped') run: | echo "One of the dependent jobs has failed. You may need to re-run it." 
&& exit 1 sentry-python-2.18.0/.gitignore000066400000000000000000000004661471214654000164470ustar00rootroot00000000000000*.pyc *.log *.egg *.db *.pid .python-version .coverage .coverage-sentry* coverage.xml .junitxml* .DS_Store .tox pip-log.txt *.egg-info /build /dist /dist-serverless sentry-python-serverless*.zip .cache .idea .eggs venv .venv .vscode/tags .pytest_cache .hypothesis relay pip-wheel-metadata .mypy_cache .vscode/ sentry-python-2.18.0/.gitmodules000066400000000000000000000001741471214654000166300ustar00rootroot00000000000000[submodule "checkouts/data-schemas"] path = checkouts/data-schemas url = https://github.com/getsentry/sentry-data-schemas sentry-python-2.18.0/.pre-commit-config.yaml000066400000000000000000000011741471214654000207350ustar00rootroot00000000000000# See https://pre-commit.com for more information # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.3.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer - repo: https://github.com/psf/black rev: 24.1.0 hooks: - id: black exclude: ^(.*_pb2.py|.*_pb2_grpc.py) - repo: https://github.com/pycqa/flake8 rev: 5.0.4 hooks: - id: flake8 # Disabled for now, because it lists a lot of problems. #- repo: https://github.com/pre-commit/mirrors-mypy # rev: 'v0.931' # hooks: # - id: mypy sentry-python-2.18.0/CHANGELOG.md000066400000000000000000004230471471214654000162740ustar00rootroot00000000000000# Changelog ## 2.18.0 ### Various fixes & improvements - Add LaunchDarkly and OpenFeature integration (#3648) by @cmanallen - Correct typo in a comment (#3726) by @szokeasaurusrex - End `http.client` span on timeout (#3723) by @Zylphrex - Check for `h2` existence in HTTP/2 transport (#3690) by @BYK - Use `type()` instead when extracting frames (#3716) by @Zylphrex - Prefer `python_multipart` import over `multipart` (#3710) by @musicinmybrain - Update active thread for asgi (#3669) by @Zylphrex - Only enable HTTP2 when DSN is HTTPS (#3678) by @BYK - Prepare for upstream Strawberry extension removal (#3649) by @DoctorJohn - Enhance README with improved clarity and developer-friendly examples (#3667) by @UTSAVS26 - Run license compliance action on all PRs (#3699) by @szokeasaurusrex - Run CodeQL action on all PRs (#3698) by @szokeasaurusrex - Fix UTC assuming test (#3722) by @BYK - Exclude fakeredis 2.26.0 on py3.6 and 3.7 (#3695) by @szokeasaurusrex - Unpin `pytest` for `tornado-latest` tests (#3714) by @szokeasaurusrex - Install `pytest-asyncio` for `redis` tests (Python 3.12-13) (#3706) by @szokeasaurusrex - Clarify that only pinned tests are required (#3713) by @szokeasaurusrex - Remove accidentally-committed print (#3712) by @szokeasaurusrex - Disable broken RQ test in newly-released RQ 2.0 (#3708) by @szokeasaurusrex - Unpin `pytest` for `celery` tests (#3701) by @szokeasaurusrex - Unpin `pytest` on Python 3.8+ `gevent` tests (#3700) by @szokeasaurusrex - Unpin `pytest` for Python 3.8+ `common` tests (#3697) by @szokeasaurusrex - Remove `pytest` pin in `requirements-devenv.txt` (#3696) by @szokeasaurusrex - Test with Falcon 4.0 (#3684) by @sentrivana ## 2.17.0 ### Various fixes & improvements - Add support for async calls in Anthropic and OpenAI integration (#3497) by @vetyy - Allow custom transaction names in ASGI (#3664) by @sl0thentr0py - Langchain: Handle case when parent span wasn't traced (#3656) by @rbasoalto - Fix Anthropic integration when using tool calls (#3615) by @kwnath - More defensive Django Spotlight middleware injection (#3665) by @BYK - 
Remove `ensure_integration_enabled_async` (#3632) by @sentrivana - Test with newer Falcon version (#3644, #3653, #3662) by @sentrivana - Fix mypy (#3657) by @sentrivana - Fix flaky transport test (#3666) by @sentrivana - Remove pin on `sphinx` (#3650) by @sentrivana - Bump `actions/checkout` from `4.2.0` to `4.2.1` (#3651) by @dependabot ## 2.16.0 ### Integrations - Bottle: Add `failed_request_status_codes` (#3618) by @szokeasaurusrex You can now define a set of integers that will determine which status codes should be reported to Sentry. ```python sentry_sdk.init( integrations=[ BottleIntegration( failed_request_status_codes={403, *range(500, 600)}, ) ] ) ``` Examples of valid `failed_request_status_codes`: - `{500}` will only send events on HTTP 500. - `{400, *range(500, 600)}` will send events on HTTP 400 as well as the 5xx range. - `{500, 503}` will send events on HTTP 500 and 503. - `set()` (the empty set) will not send events for any HTTP status code. The default is `{*range(500, 600)}`, meaning that all 5xx status codes are reported to Sentry. - Bottle: Delete never-reached code (#3605) by @szokeasaurusrex - Redis: Remove flaky test (#3626) by @sentrivana - Django: Improve getting `psycopg3` connection info (#3580) by @nijel - Django: Add `SpotlightMiddleware` when Spotlight is enabled (#3600) by @BYK - Django: Open relevant error when `SpotlightMiddleware` is on (#3614) by @BYK - Django: Support `http_methods_to_capture` in ASGI Django (#3607) by @sentrivana ASGI Django now also supports the `http_methods_to_capture` integration option. This is a configurable tuple of HTTP method verbs that should create a transaction in Sentry. The default is `("CONNECT", "DELETE", "GET", "PATCH", "POST", "PUT", "TRACE",)`. `OPTIONS` and `HEAD` are not included by default. Here's how to use it: ```python sentry_sdk.init( integrations=[ DjangoIntegration( http_methods_to_capture=("GET", "POST"), ), ], ) ``` ### Miscellaneous - Add 3.13 to setup.py (#3574) by @sentrivana - Add 3.13 to basepython (#3589) by @sentrivana - Fix type of `sample_rate` in DSC (and add explanatory tests) (#3603) by @antonpirker - Add `httpcore` based `HTTP2Transport` (#3588) by @BYK - Add opportunistic Brotli compression (#3612) by @BYK - Add `__notes__` support (#3620) by @szokeasaurusrex - Remove useless makefile targets (#3604) by @antonpirker - Simplify tox version spec (#3609) by @sentrivana - Consolidate contributing docs (#3606) by @antonpirker - Bump `codecov/codecov-action` from `4.5.0` to `4.6.0` (#3617) by @dependabot ## 2.15.0 ### Integrations - Configure HTTP methods to capture in ASGI/WSGI middleware and frameworks (#3531) by @antonpirker We've added a new option to the Django, Flask, Starlette and FastAPI integrations called `http_methods_to_capture`. This is a configurable tuple of HTTP method verbs that should create a transaction in Sentry. The default is `("CONNECT", "DELETE", "GET", "PATCH", "POST", "PUT", "TRACE",)`. `OPTIONS` and `HEAD` are not included by default. 
Here's how to use it (substitute Flask for your framework integration): ```python sentry_sdk.init( integrations=[ FlaskIntegration( http_methods_to_capture=("GET", "POST"), ), ], ) ``` - Django: Allow ASGI to use `drf_request` in `DjangoRequestExtractor` (#3572) by @PakawiNz - Django: Don't let `RawPostDataException` bubble up (#3553) by @sentrivana - Django: Add `sync_capable` to `SentryWrappingMiddleware` (#3510) by @szokeasaurusrex - AIOHTTP: Add `failed_request_status_codes` (#3551) by @szokeasaurusrex You can now define a set of integers that will determine which status codes should be reported to Sentry. ```python sentry_sdk.init( integrations=[ AioHttpIntegration( failed_request_status_codes={403, *range(500, 600)}, ) ] ) ``` Examples of valid `failed_request_status_codes`: - `{500}` will only send events on HTTP 500. - `{400, *range(500, 600)}` will send events on HTTP 400 as well as the 5xx range. - `{500, 503}` will send events on HTTP 500 and 503. - `set()` (the empty set) will not send events for any HTTP status code. The default is `{*range(500, 600)}`, meaning that all 5xx status codes are reported to Sentry. - AIOHTTP: Delete test which depends on AIOHTTP behavior (#3568) by @szokeasaurusrex - AIOHTTP: Handle invalid responses (#3554) by @szokeasaurusrex - FastAPI/Starlette: Support new `failed_request_status_codes` (#3563) by @szokeasaurusrex The format of `failed_request_status_codes` has changed from a list of integers and containers to a set: ```python sentry_sdk.init( integrations=StarletteIntegration( failed_request_status_codes={403, *range(500, 600)}, ), ) ``` The old way of defining `failed_request_status_codes` will continue to work for the time being. Examples of valid new-style `failed_request_status_codes`: - `{500}` will only send events on HTTP 500. - `{400, *range(500, 600)}` will send events on HTTP 400 as well as the 5xx range. - `{500, 503}` will send events on HTTP 500 and 503. - `set()` (the empty set) will not send events for any HTTP status code. The default is `{*range(500, 600)}`, meaning that all 5xx status codes are reported to Sentry. - FastAPI/Starlette: Fix `failed_request_status_codes=[]` (#3561) by @szokeasaurusrex - FastAPI/Starlette: Remove invalid `failed_request_status_code` tests (#3560) by @szokeasaurusrex - FastAPI/Starlette: Refactor shared test parametrization (#3562) by @szokeasaurusrex ### Miscellaneous - Deprecate `sentry_sdk.metrics` (#3512) by @szokeasaurusrex - Add `name` parameter to `start_span()` and deprecate `description` parameter (#3524 & #3525) by @antonpirker - Fix `add_query_source` with modules outside of project root (#3313) by @rominf - Test more integrations on 3.13 (#3578) by @sentrivana - Fix trailing whitespace (#3579) by @sentrivana - Improve `get_integration` typing (#3550) by @szokeasaurusrex - Make import-related tests stable (#3548) by @BYK - Fix breadcrumb sorting (#3511) by @sentrivana - Fix breadcrumb timestamp casting and its tests (#3546) by @BYK - Don't use deprecated `logger.warn` (#3552) by @sentrivana - Fix Cohere API change (#3549) by @BYK - Fix deprecation message (#3536) by @antonpirker - Remove experimental `explain_plan` feature. 
(#3534) by @antonpirker - X-fail one of the Lambda tests (#3592) by @antonpirker - Update Codecov config (#3507) by @antonpirker - Update `actions/upload-artifact` to `v4` with merge (#3545) by @joshuarli - Bump `actions/checkout` from `4.1.7` to `4.2.0` (#3585) by @dependabot ## 2.14.0 ### Various fixes & improvements - New `SysExitIntegration` (#3401) by @szokeasaurusrex For more information, see the documentation for the [SysExitIntegration](https://docs.sentry.io/platforms/python/integrations/sys_exit). - Add `SENTRY_SPOTLIGHT` env variable support (#3443) by @BYK - Support Strawberry `0.239.2` (#3491) by @szokeasaurusrex - Add separate `pii_denylist` to `EventScrubber` and run it always (#3463) by @sl0thentr0py - Celery: Add wrapper for `Celery().send_task` to support behavior as `Task.apply_async` (#2377) by @divaltor - Django: SentryWrappingMiddleware.__init__ fails if super() is object (#2466) by @cameron-simpson - Fix data_category for sessions envelope items (#3473) by @sl0thentr0py - Fix non-UTC timestamps (#3461) by @szokeasaurusrex - Remove obsolete object as superclass (#3480) by @sentrivana - Replace custom `TYPE_CHECKING` with stdlib `typing.TYPE_CHECKING` (#3447) by @dev-satoshi - Refactor `tracing_utils.py` (#3452) by @rominf - Explicitly export symbol in subpackages instead of ignoring (#3400) by @hartungstenio - Better test coverage reports (#3498) by @antonpirker - Fixed config for old coverage versions (#3504) by @antonpirker - Fix AWS Lambda tests (#3495) by @antonpirker - Remove broken Bottle tests (#3505) by @sentrivana ## 2.13.0 ### Various fixes & improvements - **New integration:** [Ray](https://docs.sentry.io/platforms/python/integrations/ray/) (#2400) (#2444) by @glowskir Usage: (add the RayIntegration to your `sentry_sdk.init()` call and make sure it is called in the worker processes) ```python import ray import sentry_sdk from sentry_sdk.integrations.ray import RayIntegration def init_sentry(): sentry_sdk.init( dsn="...", traces_sample_rate=1.0, integrations=[RayIntegration()], ) init_sentry() ray.init( runtime_env=dict(worker_process_setup_hook=init_sentry), ) ``` For more information, see the documentation for the [Ray integration](https://docs.sentry.io/platforms/python/integrations/ray/). - **New integration:** [Litestar](https://docs.sentry.io/platforms/python/integrations/litestar/) (#2413) (#3358) by @KellyWalker Usage: (add the LitestarIntegration to your `sentry_sdk.init()`) ```python from litestar import Litestar, get import sentry_sdk from sentry_sdk.integrations.litestar import LitestarIntegration sentry_sdk.init( dsn="...", traces_sample_rate=1.0, integrations=[LitestarIntegration()], ) @get("/") async def index() -> str: return "Hello, world!" app = Litestar(...) ``` For more information, see the documentation for the [Litestar integration](https://docs.sentry.io/platforms/python/integrations/litestar/). - **New integration:** [Dramatiq](https://docs.sentry.io/platforms/python/integrations/dramatiq/) from @jacobsvante (#3397) by @antonpirker Usage: (add the DramatiqIntegration to your `sentry_sdk.init()`) ```python import dramatiq import sentry_sdk from sentry_sdk.integrations.dramatiq import DramatiqIntegration sentry_sdk.init( dsn="...", traces_sample_rate=1.0, integrations=[DramatiqIntegration()], ) @dramatiq.actor(max_retries=0) def dummy_actor(x, y): return x / y dummy_actor.send(12, 0) ``` For more information, see the documentation for the [Dramatiq integration](https://docs.sentry.io/platforms/python/integrations/dramatiq/). 
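(In the Dramatiq example above, `dummy_actor.send(12, 0)` deliberately divides by zero inside the worker, so the resulting `ZeroDivisionError` is what ends up in Sentry; `max_retries=0` keeps Dramatiq from retrying, so the failure is reported on the first attempt.)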
- **New config option:** Expose `custom_repr` function that precedes `safe_repr` invocation in serializer (#3438) by @sl0thentr0py See: https://docs.sentry.io/platforms/python/configuration/options/#custom-repr - Profiling: Add client SDK info to profile chunk (#3386) by @Zylphrex - Serialize vars early to avoid living references (#3409) by @sl0thentr0py - Deprecate hub-based `sessions.py` logic (#3419) by @szokeasaurusrex - Deprecate `is_auto_session_tracking_enabled` (#3428) by @szokeasaurusrex - Add note to generated yaml files (#3423) by @sentrivana - Slim down PR template (#3382) by @sentrivana - Use new banner in readme (#3390) by @sentrivana ## 2.12.0 ### Various fixes & improvements - API: Expose the scope getters to top level API and use them everywhere (#3357) by @sl0thentr0py - API: `push_scope` deprecation warning (#3355) by @szokeasaurusrex - API: Replace `push_scope` (#3353, #3354) by @szokeasaurusrex - API: Deprecate, avoid, or stop using `configure_scope` (#3348, #3349, #3350, #3351) by @szokeasaurusrex - OTel: Remove experimental autoinstrumentation (#3239) by @sentrivana - Graphene: Add span for graphql operation (#2788) by @czyber - AI: Add async support for `ai_track` decorator (#3376) by @czyber - CI: Workaround bug preventing Django test runs (#3371) by @szokeasaurusrex - CI: Remove Django setuptools pin (#3378) by @szokeasaurusrex - Tests: Test with Django 5.1 RC (#3370) by @sentrivana - Broaden `add_attachment` type (#3342) by @szokeasaurusrex - Add span data to the transactions trace context (#3374) by @antonpirker - Gracefully fail attachment path not found case (#3337) by @sl0thentr0py - Document attachment parameters (#3342) by @szokeasaurusrex - Bump checkouts/data-schemas from `0feb234` to `6d2c435` (#3369) by @dependabot - Bump checkouts/data-schemas from `88273a9` to `0feb234` (#3252) by @dependabot ## 2.11.0 ### Various fixes & improvements - Add `disabled_integrations` (#3328) by @sentrivana Disabling individual integrations is now much easier. Instead of disabling all automatically enabled integrations and specifying the ones you want to keep, you can now use the new [`disabled_integrations`](https://docs.sentry.io/platforms/python/configuration/options/#auto-enabling-integrations) config option to provide a list of integrations to disable: ```python import sentry_sdk from sentry_sdk.integrations.flask import FlaskIntegration sentry_sdk.init( # Do not use the Flask integration even if Flask is installed.
disabled_integrations=[ FlaskIntegration(), ], ) ``` - Use operation name as transaction name in Strawberry (#3294) by @sentrivana - WSGI integrations respect `SCRIPT_NAME` env variable (#2622) by @sarvaSanjay - Make Django DB spans have origin `auto.db.django` (#3319) by @antonpirker - Sort breadcrumbs by time before sending (#3307) by @antonpirker - Fix `KeyError('sentry-monitor-start-timestamp-s')` (#3278) by @Mohsen-Khodabakhshi - Set MongoDB tags directly on span data (#3290) by @0Calories - Lower logger level for some messages (#3305) by @sentrivana and @antonpirker - Emit deprecation warnings from `Hub` API (#3280) by @szokeasaurusrex - Clarify that `instrumenter` is internal-only (#3299) by @szokeasaurusrex - Support Django 5.1 (#3207) by @sentrivana - Remove apparently unnecessary `if` (#3298) by @szokeasaurusrex - Preliminary support for Python 3.13 (#3200) by @sentrivana - Move `sentry_sdk.init` out of `hub.py` (#3276) by @szokeasaurusrex - Unhardcode integration list (#3240) by @rominf - Allow passing of PostgreSQL port in tests (#3281) by @rominf - Add tests for `@ai_track` decorator (#3325) by @colin-sentry - Do not include type checking code in coverage report (#3327) by @antonpirker - Fix test_installed_modules (#3309) by @szokeasaurusrex - Fix typos and grammar in a comment (#3293) by @szokeasaurusrex - Fixed failed tests setup (#3303) by @antonpirker - Only assert warnings we are interested in (#3314) by @szokeasaurusrex ## 2.10.0 ### Various fixes & improvements - Add client cert and key support to `HttpTransport` (#3258) by @grammy-jiang Add `cert_file` and `key_file` to your `sentry_sdk.init` to use a custom client cert and key. Alternatively, the environment variables `CLIENT_CERT_FILE` and `CLIENT_KEY_FILE` can be used as well. - OpenAI: Lazy initialize tiktoken to avoid http at import time (#3287) by @colin-sentry - OpenAI, Langchain: Make tiktoken encoding name configurable + tiktoken usage opt-in (#3289) by @colin-sentry Fixed a bug where having certain packages installed alongside the Sentry SDK caused an HTTP request to be made to OpenAI infrastructure when the Sentry SDK was initialized. The request was made when the `tiktoken` package and at least one of the `openai` or `langchain` packages were installed. The request was fetching a `tiktoken` encoding in order to correctly measure token usage in some OpenAI and Langchain calls. This behavior is now opt-in. The choice of encoding to use was made configurable as well. To opt in, set the `tiktoken_encoding_name` parameter in the OpenAI or Langchain integration.
```python sentry_sdk.init( integrations=[ OpenAIIntegration(tiktoken_encoding_name="cl100k_base"), LangchainIntegration(tiktoken_encoding_name="cl100k_base"), ], ) ``` - PyMongo: Send query description as valid JSON (#3291) by @0Calories - Remove Python 2 compatibility code (#3284) by @szokeasaurusrex - Fix `sentry_sdk.init` type hint (#3283) by @szokeasaurusrex - Deprecate `hub` in `Profile` (#3270) by @szokeasaurusrex - Stop using `Hub` in `init` (#3275) by @szokeasaurusrex - Delete `_should_send_default_pii` (#3274) by @szokeasaurusrex - Remove `Hub` usage in `conftest` (#3273) by @szokeasaurusrex - Rename debug logging filter (#3260) by @szokeasaurusrex - Update `NoOpSpan.finish` signature (#3267) by @szokeasaurusrex - Remove `Hub` in `Transaction.finish` (#3267) by @szokeasaurusrex - Remove Hub from `capture_internal_exception` logic (#3264) by @szokeasaurusrex - Improve `Scope._capture_internal_exception` type hint (#3264) by @szokeasaurusrex - Correct `ExcInfo` type (#3266) by @szokeasaurusrex - Stop using `Hub` in `tracing_utils` (#3269) by @szokeasaurusrex ## 2.9.0 ### Various fixes & improvements - ref(transport): Improve event data category typing (#3243) by @szokeasaurusrex - ref(tracing): Improved handling of span status (#3261) by @antonpirker - test(client): Add tests for dropped span client reports (#3244) by @szokeasaurusrex - test(transport): Test new client report features (#3244) by @szokeasaurusrex - feat(tracing): Record lost spans in client reports (#3244) by @szokeasaurusrex - test(sampling): Replace custom logic with `capture_record_lost_event_calls` (#3257) by @szokeasaurusrex - test(transport): Non-order-dependent discarded events assertion (#3255) by @szokeasaurusrex - test(core): Introduce `capture_record_lost_event_calls` fixture (#3254) by @szokeasaurusrex - test(core): Fix non-idempotent test (#3253) by @szokeasaurusrex ## 2.8.0 ### Various fixes & improvements - `profiler_id` uses underscore (#3249) by @Zylphrex - Don't send full env to subprocess (#3251) by @kmichel-aiven - Stop using `Hub` in `HttpTransport` (#3247) by @szokeasaurusrex - Remove `ipdb` from test requirements (#3237) by @rominf - Avoid propagation of empty baggage (#2968) by @hartungstenio - Add entry point for `SentryPropagator` (#3086) by @mender - Bump checkouts/data-schemas from `8c13457` to `88273a9` (#3225) by @dependabot ## 2.7.1 ### Various fixes & improvements - fix(otel): Fix missing baggage (#3218) by @sentrivana - This is the config file of asdf-vm which we do not use. (#3215) by @antonpirker - Added option to disable middleware spans in Starlette (#3052) by @antonpirker - build: Update tornado version in setup.py to match code check. 
(#3206) by @aclemons ## 2.7.0 - Add `origin` to spans and transactions (#3133) by @antonpirker - OTel: Set up typing for OTel (#3168) by @sentrivana - OTel: Auto instrumentation skeleton (#3143) by @sentrivana - OpenAI: If there is an internal error, still return a value (#3192) by @colin-sentry - MongoDB: Add MongoDB collection span tag (#3182) by @0Calories - MongoDB: Change span operation from `db.query` to `db` (#3186) by @0Calories - MongoDB: Remove redundant command name in query description (#3189) by @0Calories - Apache Spark: Fix spark driver integration (#3162) by @seyoon-lim - Apache Spark: Add Spark test suite to tox.ini and to CI (#3199) by @sentrivana - Codecov: Add failed test commits in PRs (#3190) by @antonpirker - Update library, Python versions in tests (#3202) by @sentrivana - Remove Hub from our test suite (#3197) by @antonpirker - Use env vars for default CA cert bundle location (#3160) by @DragoonAethis - Create a separate test group for AI (#3198) by @sentrivana - Add additional stub packages for type checking (#3122) by @Daverball - Proper naming of requirements files (#3191) by @antonpirker - Pinning pip because new version does not work with some versions of Celery and Httpx (#3195) by @antonpirker - build(deps): bump supercharge/redis-github-action from 1.7.0 to 1.8.0 (#3193) by @dependabot - build(deps): bump actions/checkout from 4.1.6 to 4.1.7 (#3171) by @dependabot - build(deps): update pytest-asyncio requirement (#3087) by @dependabot ## 2.6.0 - Introduce continuous profiling mode (#2830) by @Zylphrex - Profiling: Add deprecation comment for profiler internals (#3167) by @sentrivana - Profiling: Move thread data to trace context (#3157) by @Zylphrex - Explicitly export cron symbols for typecheckers (#3072) by @spladug - Cleaning up ASGI tests for Django (#3180) by @antonpirker - Celery: Add Celery receive latency (#3174) by @antonpirker - Metrics: Update type hints for tag values (#3156) by @elramen - Django: Fix psycopg3 reconnect error (#3111) by @szokeasaurusrex - Tracing: Keep original function signature when decorated (#3178) by @sentrivana - Reapply "Refactor the Celery Beat integration (#3105)" (#3144) (#3175) by @antonpirker - Added contributor image to readme (#3183) by @antonpirker - bump actions/checkout from 4.1.4 to 4.1.6 (#3147) by @dependabot - bump checkouts/data-schemas from `59f9683` to `8c13457` (#3146) by @dependabot ## 2.5.1 This change fixes a regression in our cron monitoring feature, which caused cron checkins not to be sent. The regression appears to have been introduced in version 2.4.0. **We recommend that all users, who use Cron monitoring and are currently running sentry-python ≥2.4.0, upgrade to this release as soon as possible!** ### Other fixes & improvements - feat(tracing): Warn if not-started transaction entered (#3003) by @szokeasaurusrex - test(scope): Ensure `last_event_id` cleared (#3124) by @szokeasaurusrex - fix(scope): Clear last_event_id on scope clear (#3124) by @szokeasaurusrex ## 2.5.0 ### Various fixes & improvements - Allow to configure status codes to report to Sentry in Starlette and FastAPI (#3008) by @sentrivana By passing a new option to the FastAPI and Starlette integrations, you're now able to configure what status codes should be sent as events to Sentry. Here's how it works: ```python from sentry_sdk.integrations.starlette import StarletteIntegration from sentry_sdk.integrations.fastapi import FastApiIntegration sentry_sdk.init( # ... 
integrations=[ StarletteIntegration( failed_request_status_codes=[403, range(500, 599)], ), FastApiIntegration( failed_request_status_codes=[403, range(500, 599)], ), ] ) ``` `failed_request_status_codes` expects a list of integers or containers (objects that allow membership checks via `in`) of integers. Examples of valid `failed_request_status_codes`: - `[500]` will only send events on HTTP 500. - `[400, range(500, 599)]` will send events on HTTP 400 as well as the 500-599 range. - `[500, 503]` will send events on HTTP 500 and 503. The default is `[range(500, 599)]`. See the [FastAPI](https://docs.sentry.io/platforms/python/integrations/fastapi/) and [Starlette](https://docs.sentry.io/platforms/python/integrations/starlette/) integration docs for more details. - Support multiple keys with `cache_prefixes` (#3136) by @sentrivana - Support integer Redis keys (#3132) by @sentrivana - Update SDK version in CONTRIBUTING.md (#3129) by @sentrivana - Bump actions/checkout from 4.1.4 to 4.1.5 (#3067) by @dependabot ## 2.4.0 ### Various fixes & improvements - Celery: Made `cache.key` span data field a list (#3110) by @antonpirker - Celery Beat: Refactor the Celery Beat integration (#3105) by @antonpirker - GRPC: Add None check for grpc.aio interceptor (#3109) by @ordinary-jamie - Docs: Remove `last_event_id` from migration guide (#3126) by @szokeasaurusrex - fix(django): Proper transaction names for i18n routes (#3104) by @sentrivana - fix(scope): Copy `_last_event_id` in `Scope.__copy__` (#3123) by @szokeasaurusrex - fix(tests): Adapt to new Anthropic version (#3119) by @sentrivana - build(deps): bump checkouts/data-schemas from `4381a97` to `59f9683` (#3066) by @dependabot ## 2.3.1 ### Various fixes & improvements - Also handle byte arrays as strings in Redis caches (#3101) by @antonpirker - Do not crash exceptiongroup (by patching excepthook and keeping the name of the function) (#3099) by @antonpirker ## 2.3.0 ### Various fixes & improvements - NEW: The Redis integration now supports the Sentry Caches module. See https://docs.sentry.io/product/performance/caches/ (#3073) by @antonpirker - NEW: The Django integration now supports the Sentry Caches module.
See https://docs.sentry.io/product/performance/caches/ (#3009) by @antonpirker - Fix `cohere` testsuite for new release of `cohere` (#3098) by @antonpirker - Fix ClickHouse integration where `_sentry_span` might be missing (#3096) by @sentrivana ## 2.2.1 ### Various fixes & improvements - Add conditional check for delivery_info's existence (#3083) by @cmanallen - Updated deps for latest langchain version (#3092) by @antonpirker - Fixed grpcio extras to work as described in the docs (#3081) by @antonpirker - Use Python's venv instead of virtualenv to create virtual envs (#3077) by @antonpirker - Celery: Add comment about kwargs_headers (#3079) by @szokeasaurusrex - Celery: Queues module producer implementation (#3079) by @szokeasaurusrex - Fix N803 flake8 failures (#3082) by @szokeasaurusrex ## 2.2.0 ### New features - Celery integration now sends additional data to Sentry to enable new features to gauge the health of your queues - Added a new integration for Cohere - Reintroduced the `last_event_id` function, which had been removed in 2.0.0 ### Other fixes & improvements - Add tags + data passing functionality to @ai_track (#3071) by @colin-sentry - Only propagate headers from spans within transactions (#3070) by @szokeasaurusrex - Improve type hints for set metrics (#3048) by @elramen - Fix `get_client` typing (#3063) by @szokeasaurusrex - Auto-enable Anthropic integration + gate imports (#3054) by @colin-sentry - Made `MeasurementValue.unit` NotRequired (#3051) by @antonpirker ## 2.1.1 - Fix trace propagation in Celery tasks started by Celery Beat. (#3047) by @antonpirker ## 2.1.0 - fix(quart): Fix Quart integration (#3043) by @szokeasaurusrex - **New integration:** [Langchain](https://docs.sentry.io/platforms/python/integrations/langchain/) (#2911) by @colin-sentry Usage: (Langchain is auto-enabling, so you do not need to do anything special) ```python from langchain_openai import ChatOpenAI import sentry_sdk sentry_sdk.init( dsn="...", enable_tracing=True, traces_sample_rate=1.0, ) llm = ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0) ``` Check out [the LangChain docs](https://docs.sentry.io/platforms/python/integrations/langchain/) for details. - **New integration:** [Anthropic](https://docs.sentry.io/platforms/python/integrations/anthropic/) (#2831) by @czyber Usage: (add the AnthropicIntegration to your `sentry_sdk.init()` call) ```python from anthropic import Anthropic import sentry_sdk from sentry_sdk.integrations.anthropic import AnthropicIntegration sentry_sdk.init( dsn="...", enable_tracing=True, traces_sample_rate=1.0, integrations=[AnthropicIntegration()], ) client = Anthropic() ``` Check out [the Anthropic docs](https://docs.sentry.io/platforms/python/integrations/anthropic/) for details. - **New integration:** [Huggingface Hub](https://docs.sentry.io/platforms/python/integrations/huggingface/) (#3033) by @colin-sentry Usage: (Huggingface Hub is auto-enabling, so you do not need to do anything special) ```python import sentry_sdk from huggingface_hub import InferenceClient sentry_sdk.init( dsn="...", enable_tracing=True, traces_sample_rate=1.0, ) client = InferenceClient("some-model") ``` Check out [the Huggingface docs](https://docs.sentry.io/platforms/python/integrations/huggingface/) for details. (coming soon!) - fix(huggingface): Reduce API cross-section for huggingface in test (#3042) by @colin-sentry - fix(django): Fix Django ASGI integration on Python 3.12 (#3027) by @bellini666 - feat(perf): Add ability to put measurements directly on spans.
(#2967) by @colin-sentry - fix(tests): Fix trytond tests (#3031) by @sentrivana - fix(tests): Update `pytest-asyncio` to fix CI (#3030) by @sentrivana - fix(docs): Link to respective migration guides directly (#3020) by @sentrivana - docs(scope): Add docstring to `Scope.set_tags` (#2978) by @szokeasaurusrex - test(scope): Fix typos in assert error message (#2978) by @szokeasaurusrex - feat(scope): New `set_tags` function (#2978) by @szokeasaurusrex - test(scope): Add unit test for `Scope.set_tags` (#2978) by @szokeasaurusrex - feat(scope): Add `set_tags` to top-level API (#2978) by @szokeasaurusrex - test(scope): Add unit test for top-level API `set_tags` (#2978) by @szokeasaurusrex - feat(tests): Parallelize tox (#3025) by @sentrivana - build(deps): Bump checkouts/data-schemas from `4aa14a7` to `4381a97` (#3028) by @dependabot - meta(license): Bump copyright year (#3029) by @szokeasaurusrex ## 2.0.1 ### Various fixes & improvements - Fix: Do not use convenience decorator (#3022) by @sentrivana - Refactoring propagation context (#2970) by @antonpirker - Use `pid` for test database name in Django tests (#2998) by @antonpirker - Remove outdated RC mention in docs (#3018) by @sentrivana - Delete inaccurate comment from docs (#3002) by @szokeasaurusrex - Add Lambda function that deletes test Lambda functions (#2960) by @antonpirker - Correct discarded transaction debug message (#3002) by @szokeasaurusrex - Add tests for discarded transaction debug messages (#3002) by @szokeasaurusrex - Fix comment typo in metrics (#2992) by @szokeasaurusrex - build(deps): bump actions/checkout from 4.1.1 to 4.1.4 (#3011) by @dependabot - build(deps): bump checkouts/data-schemas from `1e17eb5` to `4aa14a7` (#2997) by @dependabot ## 2.0.0 This is the first major update in a *long* time! We dropped support for some ancient languages and frameworks (Yes, Python 2.7 is no longer supported). Additionally we refactored a big part of the foundation of the SDK (how data inside the SDK is handled). We hope you like it! For a shorter version of what you need to do, to upgrade to Sentry SDK 2.0 see: https://docs.sentry.io/platforms/python/migration/1.x-to-2.x ### New Features - Additional integrations will now be activated automatically if the SDK detects the respective package is installed: Ariadne, ARQ, asyncpg, Chalice, clickhouse-driver, GQL, Graphene, huey, Loguru, PyMongo, Quart, Starlite, Strawberry. - Added new API for custom instrumentation: `new_scope`, `isolation_scope`. See the [Deprecated](#deprecated) section to see how they map to the existing APIs. ### Changed (These changes are all backwards-incompatible. **Breaking Change** (if you are just skimming for that phrase)) - The Pyramid integration will not capture errors that might happen in `authenticated_userid()` in a custom `AuthenticationPolicy` class. - The method `need_code_loation` of the `MetricsAggregator` was renamed to `need_code_location`. - The `BackgroundWorker` thread used to process events was renamed from `raven-sentry.BackgroundWorker` to `sentry-sdk.BackgroundWorker`. - The `reraise` function was moved from `sentry_sdk._compat` to `sentry_sdk.utils`. - The `_ScopeManager` was moved from `sentry_sdk.hub` to `sentry_sdk.scope`. - Moved the contents of `tracing_utils_py3.py` to `tracing_utils.py`. The `start_child_span_decorator` is now in `sentry_sdk.tracing_utils`. - The actual implementation of `get_current_span` was moved to `sentry_sdk.tracing_utils`. `sentry_sdk.get_current_span` is still accessible as part of the top-level API. 
- `sentry_sdk.tracing_utils.add_query_source()`: Removed the `hub` parameter. It is not necessary anymore. - `sentry_sdk.tracing_utils.record_sql_queries()`: Removed the `hub` parameter. It is not necessary anymore. - `sentry_sdk.tracing_utils.get_current_span()` now takes a `scope` instead of a `hub` as a parameter. - `sentry_sdk.tracing_utils.should_propagate_trace()` now takes a `Client` instead of a `Hub` as first parameter. - `sentry_sdk.utils.is_sentry_url()` now takes a `Client` instead of a `Hub` as first parameter. - `sentry_sdk.utils._get_contextvars` does not return a tuple with three values, but a tuple with two values. The `copy_context` was removed. - If you create a transaction manually and later mutate the transaction in a `configure_scope` block, this no longer works. Here is a recipe on how to change your code to make it work: Your existing implementation: ```python transaction = sentry_sdk.transaction(...) # later in the code execution: with sentry_sdk.configure_scope() as scope: scope.set_transaction_name("new-transaction-name") ``` needs to be changed to this: ```python transaction = sentry_sdk.transaction(...) # later in the code execution: scope = sentry_sdk.get_current_scope() scope.set_transaction_name("new-transaction-name") ``` - The classes listed in the table below are now abstract base classes. Therefore, they can no longer be instantiated. Subclasses can only be instantiated if they implement all of the abstract methods.
<details>
  <summary>Show table</summary>

| Class | Abstract methods |
| ------------------------------------- | -------------------------------------- |
| `sentry_sdk.integrations.Integration` | `setup_once` |
| `sentry_sdk.metrics.Metric` | `add`, `serialize_value`, and `weight` |
| `sentry_sdk.profiler.Scheduler` | `setup` and `teardown` |
| `sentry_sdk.transport.Transport` | `capture_envelope` |

</details>
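As an illustration of the new abstract-base-class behavior, here is a minimal sketch of a custom transport; the class name `PrintingTransport` is made up for this example, and the subclass can be instantiated only because it implements the abstract `capture_envelope` method:

```python
import sentry_sdk
from sentry_sdk.transport import Transport


class PrintingTransport(Transport):
    # Hypothetical subclass for illustration: without this override of the
    # abstract `capture_envelope` method, instantiation would raise a TypeError.
    def capture_envelope(self, envelope):
        print("would send envelope:", envelope)


# Passing a `Transport` instance is the supported way to plug in a custom transport.
sentry_sdk.init(dsn="...", transport=PrintingTransport())
```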
### Removed (These changes are all backwards-incompatible. **Breaking Change** (if you are just skimming for that phrase)) - Removed support for Python 2 and Python 3.5. The SDK now requires at least Python 3.6. - Removed support for Celery 3.\*. - Removed support for Django 1.8, 1.9, 1.10. - Removed support for Flask 0.\*. - Removed support for gRPC < 1.39. - Removed support for Tornado < 6. - Removed the `last_event_id()` top-level API. The last event ID is still returned by `capture_event()`, `capture_exception()` and `capture_message()` but the top-level API `sentry_sdk.last_event_id()` has been removed. - Removed support for sending events to the `/store` endpoint. Everything is now sent to the `/envelope` endpoint. If you're on SaaS you don't have to worry about this, but if you're running Sentry yourself you'll need version `20.6.0` or higher of self-hosted Sentry. - The deprecated `with_locals` configuration option was removed. Use `include_local_variables` instead. See https://docs.sentry.io/platforms/python/configuration/options/#include-local-variables. - The deprecated `request_bodies` configuration option was removed. Use `max_request_body_size`. See https://docs.sentry.io/platforms/python/configuration/options/#max-request-body-size. - Removed support for `user.segment`. It was also removed from the trace header and from the dynamic sampling context. - Removed support for the `install` method for custom integrations. Please use `setup_once` instead. - Removed `sentry_sdk.tracing.Span.new_span`. Use `sentry_sdk.tracing.Span.start_child` instead. - Removed `sentry_sdk.tracing.Transaction.new_span`. Use `sentry_sdk.tracing.Transaction.start_child` instead. - Removed support for creating transactions via `sentry_sdk.tracing.Span(transaction=...)`. To create a transaction, please use `sentry_sdk.tracing.Transaction(name=...)`. - Removed `sentry_sdk.utils.Auth.store_api_url`. - `sentry_sdk.utils.Auth.get_api_url` now accepts a `sentry_sdk.consts.EndpointType` enum instead of a string as its only parameter. We recommend omitting this argument when calling the function, since the parameter's default value is the only possible `sentry_sdk.consts.EndpointType` value. The parameter exists for future compatibility. - Removed `tracing_utils_py2.py`. The `start_child_span_decorator` is now in `sentry_sdk.tracing_utils`. - Removed the `sentry_sdk.profiler.Scheduler.stop_profiling` method. Any calls to this method can simply be removed, since this was a no-op method. ### Deprecated - Using the `Hub` directly as well as using hub-based APIs has been deprecated. Where available, use [the top-level API instead](sentry_sdk/api.py); otherwise use the [scope API](sentry_sdk/scope.py) or the [client API](sentry_sdk/client.py). Before: ```python with hub.start_span(...): # do something ``` After: ```python import sentry_sdk with sentry_sdk.start_span(...): # do something ``` - Hub cloning is deprecated. Before: ```python with Hub(Hub.current) as hub: # do something with the cloned hub ``` After: ```python import sentry_sdk with sentry_sdk.isolation_scope() as scope: # do something with the forked scope ``` - `configure_scope` is deprecated. Use the new isolation scope directly via `get_isolation_scope()` instead. Before: ```python with configure_scope() as scope: # do something with `scope` ``` After: ```python from sentry_sdk import get_isolation_scope scope = get_isolation_scope() # do something with `scope` ``` - `push_scope` is deprecated.
  Use the new `new_scope` context manager to fork the necessary scopes.

  Before:

  ```python
  with push_scope() as scope:
      # do something with `scope`
  ```

  After:

  ```python
  import sentry_sdk

  with sentry_sdk.new_scope() as scope:
      # do something with `scope`
  ```

- Accessing the client via the hub has been deprecated. Use the top-level `sentry_sdk.get_client()` to get the current client.
- `profiler_mode` and `profiles_sample_rate` have been deprecated as `_experiments` options. Use them as top level options instead:

  ```python
  sentry_sdk.init(
      ...,
      profiler_mode="thread",
      profiles_sample_rate=1.0,
  )
  ```

- Deprecated `sentry_sdk.transport.Transport.capture_event`. Please use `sentry_sdk.transport.Transport.capture_envelope` instead.
- Passing a function to `sentry_sdk.init`'s `transport` keyword argument has been deprecated. If you wish to provide a custom transport, please pass a `sentry_sdk.transport.Transport` instance or a subclass.
- The parameter `propagate_hub` in `ThreadingIntegration()` was deprecated and renamed to `propagate_scope`.

## 1.45.0

This is the final 1.x release for the foreseeable future. Development will continue on the 2.x release line. The first 2.x version will be available in the next few weeks.

### Various fixes & improvements

- Allow to upsert monitors (#2929) by @sentrivana

  It's now possible to provide `monitor_config` to the `monitor` decorator/context manager directly:

  ```python
  from sentry_sdk.crons import monitor

  # All keys except `schedule` are optional
  monitor_config = {
      "schedule": {"type": "crontab", "value": "0 0 * * *"},
      "timezone": "Europe/Vienna",
      "checkin_margin": 10,
      "max_runtime": 10,
      "failure_issue_threshold": 5,
      "recovery_threshold": 5,
  }

  @monitor(monitor_slug='<monitor-slug>', monitor_config=monitor_config)
  def tell_the_world():
      print('My scheduled task...')
  ```

  Check out [the cron docs](https://docs.sentry.io/platforms/python/crons/) for details.

- Add Django `signals_denylist` to filter signals that are attached to by `signals_spans` (#2758) by @lieryan

  If you want to exclude some Django signals from performance tracking, you can use the new `signals_denylist` Django option:

  ```python
  import django.db.models.signals
  import sentry_sdk

  sentry_sdk.init(
      ...
      integrations=[
          DjangoIntegration(
              ...
              signals_denylist=[
                  django.db.models.signals.pre_init,
                  django.db.models.signals.post_init,
              ],
          ),
      ],
  )
  ```

- `increment` for metrics (#2588) by @mitsuhiko

  `increment` and `inc` are equivalent, so you can pick whichever you like more.

- Add `value`, `unit` to `before_emit_metric` (#2958) by @sentrivana

  If you add a custom `before_emit_metric`, it'll now accept 4 arguments (the `key`, `value`, `unit` and `tags`) instead of just `key` and `tags`.

  ```python
  def before_emit(key, value, unit, tags):
      if key == "removed-metric":
          return False
      tags["extra"] = "foo"
      del tags["release"]
      return True

  sentry_sdk.init(
      ...
      _experiments={
          "before_emit_metric": before_emit,
      }
  )
  ```

- Remove experimental metric summary options (#2957) by @sentrivana

  The `_experiments` options `metrics_summary_sample_rate` and `should_summarize_metric` have been removed.
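  If you had these options set, they can simply be deleted from your `init` call. A before/after sketch (the callback name and its signature are illustrative assumptions, not SDK API):

  ```python
  import sentry_sdk

  def my_filter(*args, **kwargs):
      # placeholder for a former `should_summarize_metric` callback
      return True

  # Before (these `_experiments` keys are no longer recognized):
  sentry_sdk.init(
      _experiments={
          "metrics_summary_sample_rate": 1.0,
          "should_summarize_metric": my_filter,
      },
  )

  # After: simply drop both keys.
  sentry_sdk.init()
  ```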
- New normalization rules for metric keys, names, units, tags (#2946) by @sentrivana
- Change `data_category` from `statsd` to `metric_bucket` (#2954) by @cleptric
- Accessing `__mro__` might throw a `ValueError` (#2952) by @sentrivana
- Suppress prompt spawned by subprocess when using `pythonw` (#2936) by @collinbanko
- Handle `None` in GraphQL query #2715 (#2762) by @czyber
- Do not send "quiet" Sanic exceptions to Sentry (#2821) by @hamedsh
- Implement `metric_bucket` rate limits (#2933) by @cleptric
- Fix type hints for `monitor` decorator (#2944) by @szokeasaurusrex
- Remove deprecated `typing` imports in crons (#2945) by @szokeasaurusrex
- Make `monitor_config` a `TypedDict` (#2931) by @sentrivana
- Add `devenv-requirements.txt` and update env setup instructions (#2761) by @arr-ee
- Bump `types-protobuf` from `4.24.0.20240311` to `4.24.0.20240408` (#2941) by @dependabot
- Disable Codecov check run annotations (#2537) by @eliatcodecov

## 1.44.1

### Various fixes & improvements

- Make `monitor` async friendly (#2912) by @sentrivana

  You can now decorate your async functions with the `monitor` decorator and they will correctly report their duration and completion status.

- Fixed `Event | None` runtime `TypeError` (#2928) by @szokeasaurusrex

## 1.44.0

### Various fixes & improvements

- ref: Define types at runtime (#2914) by @szokeasaurusrex
- Explicit reexport of types (#2866) (#2913) by @szokeasaurusrex
- feat(profiling): Add thread data to spans (#2843) by @Zylphrex

## 1.43.0

### Various fixes & improvements

- Add optional `keep_alive` (#2842) by @sentrivana

  If you're experiencing frequent network issues between the SDK and Sentry, you can try turning on TCP keep-alive:

  ```python
  import sentry_sdk

  sentry_sdk.init(
      # ...your usual settings...
      keep_alive=True,
  )
  ```

- Add support for Celery Redbeat cron tasks (#2643) by @kwigley

  The SDK now supports the Redbeat scheduler in addition to the default Celery Beat scheduler for auto instrumenting crons. See [the docs](https://docs.sentry.io/platforms/python/integrations/celery/crons/) for more information about how to set this up.

- `aws_event` can be an empty list (#2849) by @sentrivana
- Re-export `Event` in `types.py` (#2829) by @szokeasaurusrex
- Small API docs improvement (#2828) by @antonpirker
- Fixed OpenAI tests (#2834) by @antonpirker
- Bump `checkouts/data-schemas` from `ed078ed` to `8232f17` (#2832) by @dependabot

## 1.42.0

### Various fixes & improvements

- **New integration:** [OpenAI integration](https://docs.sentry.io/platforms/python/integrations/openai/) (#2791) by @colin-sentry

  We added an integration for OpenAI to capture errors and also performance data when using the OpenAI Python SDK.

  Usage: This integration is auto-enabling, so if you have the `openai` package in your project it will be enabled. Just initialize Sentry before you create your OpenAI client.

  ```python
  from openai import OpenAI

  import sentry_sdk

  sentry_sdk.init(
      dsn="___PUBLIC_DSN___",
      enable_tracing=True,
      traces_sample_rate=1.0,
  )

  client = OpenAI()
  ```

  For more information, see the documentation for the [OpenAI integration](https://docs.sentry.io/platforms/python/integrations/openai/).
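  With the integration active, subsequent OpenAI calls made through the client are captured automatically. For example (the model and prompt are placeholders):

  ```python
  response = client.chat.completions.create(
      model="gpt-3.5-turbo",
      messages=[{"role": "user", "content": "Hello!"}],
  )
  print(response.choices[0].message.content)
  ```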
- Discard open OpenTelemetry spans after 10 minutes (#2801) by @antonpirker
- Propagate sentry-trace and baggage headers to Huey tasks (#2792) by @cnschn
- Added Event type (#2753) by @szokeasaurusrex
- Improve scrub_dict typing (#2768) by @szokeasaurusrex
- Dependencies: bump types-protobuf from 4.24.0.20240302 to 4.24.0.20240311 (#2797) by @dependabot

## 1.41.0

### Various fixes & improvements

- Add recursive scrubbing to `EventScrubber` (#2755) by @Cheapshot003

  By default, the `EventScrubber` will not search your events for potential PII recursively. With this release, you can enable this behavior with:

  ```python
  import sentry_sdk
  from sentry_sdk.scrubber import EventScrubber

  sentry_sdk.init(
      # ...your usual settings...
      event_scrubber=EventScrubber(recursive=True),
  )
  ```

- Expose `socket_options` (#2786) by @sentrivana

  If the SDK is experiencing connection issues (connection resets, server closing connection without response, etc.) while sending events to Sentry, tweaking the default `urllib3` socket options to the following can help:

  ```python
  import socket
  from urllib3.connection import HTTPConnection
  import sentry_sdk

  sentry_sdk.init(
      # ...your usual settings...
      socket_options=HTTPConnection.default_socket_options + [
          (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
          # note: skip the following line if you're on MacOS since TCP_KEEPIDLE doesn't exist there
          (socket.SOL_TCP, socket.TCP_KEEPIDLE, 45),
          (socket.SOL_TCP, socket.TCP_KEEPINTVL, 10),
          (socket.SOL_TCP, socket.TCP_KEEPCNT, 6),
      ],
  )
  ```

- Allow to configure merge target for releases (#2777) by @sentrivana
- Allow empty character in metric tags values (#2775) by @viglia
- Replace invalid tag values with an empty string instead of _ (#2773) by @markushi
- Add documentation comment to `scrub_list` (#2769) by @szokeasaurusrex
- Fixed regex to parse version in lambda package file (#2767) by @antonpirker
- xfail broken AWS Lambda tests for now (#2794) by @sentrivana
- Removed print statements because it messes with the tests (#2789) by @antonpirker
- Bump `types-protobuf` from 4.24.0.20240129 to 4.24.0.20240302 (#2782) by @dependabot
- Bump `checkouts/data-schemas` from `eb941c2` to `ed078ed` (#2781) by @dependabot

## 1.40.6

### Various fixes & improvements

- Fix compatibility with `greenlet`/`gevent` (#2756) by @sentrivana
- Fix query source relative filepath (#2717) by @gggritso
- Support `clickhouse-driver==0.2.7` (#2752) by @sentrivana
- Bump `checkouts/data-schemas` from `6121fd3` to `eb941c2` (#2747) by @dependabot

## 1.40.5

### Various fixes & improvements

- Deprecate `last_event_id()`. (#2749) by @antonpirker
- Warn if uWSGI is set up without proper thread support (#2738) by @sentrivana

  uWSGI has to be run in threaded mode for the SDK to run properly. If this is not the case, the consequences could range from features unexpectedly not working to uWSGI workers crashing.

  Please make sure to run uWSGI with both `--enable-threads` and `--py-call-uwsgi-fork-hooks`.
- `parsed_url` can be `None` (#2734) by @sentrivana
- Python 3.7 is not supported anymore by Lambda, so removed it and added 3.12 (#2729) by @antonpirker

## 1.40.4

### Various fixes & improvements

- Only start metrics flusher thread on demand (#2727) by @sentrivana
- Bump checkouts/data-schemas from `aa7058c` to `6121fd3` (#2724) by @dependabot

## 1.40.3

### Various fixes & improvements

- Turn off metrics for uWSGI (#2720) by @sentrivana
- Minor improvements (#2714) by @antonpirker

## 1.40.2

### Various fixes & improvements

- test: Fix `pytest` error (#2712) by @szokeasaurusrex
- build(deps): bump types-protobuf from 4.24.0.4 to 4.24.0.20240129 (#2691) by @dependabot

## 1.40.1

### Various fixes & improvements

- Fix uWSGI workers hanging (#2694) by @sentrivana
- Make metrics work with `gevent` (#2694) by @sentrivana
- Guard against `engine.url` being `None` (#2708) by @sentrivana
- Fix performance regression in `sentry_sdk.utils._generate_installed_modules` (#2703) by @GlenWalker
- Guard against Sentry initialization mid SQLAlchemy cursor (#2702) by @apmorton
- Fix yaml generation script (#2695) by @sentrivana
- Fix AWS Lambda workflow (#2710) by @sentrivana
- Bump `codecov/codecov-action` from 3 to 4 (#2706) by @dependabot
- Bump `actions/cache` from 3 to 4 (#2661) by @dependabot
- Bump `actions/checkout` from 3.1.0 to 4.1.1 (#2561) by @dependabot
- Bump `github/codeql-action` from 2 to 3 (#2603) by @dependabot
- Bump `actions/setup-python` from 4 to 5 (#2577) by @dependabot

## 1.40.0

### Various fixes & improvements

- Enable metrics related settings by default (#2685) by @iambriccardo
- Fix `UnicodeDecodeError` on Python 2 (#2657) by @sentrivana
- Enable DB query source by default (#2629) by @sentrivana
- Fix query source duration check (#2675) by @sentrivana
- Reformat with `black==24.1.0` (#2680) by @sentrivana
- Cleaning up existing code to prepare for new Scopes API (#2611) by @antonpirker
- Moved redis related tests to databases (#2674) by @antonpirker
- Improve `sentry_sdk.trace` type hints (#2633) by @szokeasaurusrex
- Bump `checkouts/data-schemas` from `e9f7d58` to `aa7058c` (#2639) by @dependabot

## 1.39.2

### Various fixes & improvements

- Fix timestamp in transaction created by OTel (#2627) by @antonpirker
- Fix relative path in DB query source (#2624) by @antonpirker
- Run more CI checks on 2.0 branch (#2625) by @sentrivana
- Fix tracing `TypeError` for static and class methods (#2559) by @szokeasaurusrex
- Fix missing `ctx` in Arq integration (#2600) by @ivanovart
- Change `data_category` from `check_in` to `monitor` (#2598) by @sentrivana

## 1.39.1

### Various fixes & improvements

- Fix psycopg2 detection in the Django integration (#2593) by @sentrivana
- Filter out empty string releases (#2591) by @sentrivana
- Fixed local var not present when there is an error in a user's `error_sampler` function (#2511) by @antonpirker
- Fixed typing in `aiohttp` (#2590) by @antonpirker

## 1.39.0

### Various fixes & improvements

- Add support for cluster clients from Redis SDK (#2394) by @md384
- Improve location reporting for timer metrics (#2552) by @mitsuhiko
- Fix Celery `TypeError` with no-argument `apply_async` (#2575) by @szokeasaurusrex
- Fix Lambda integration with EventBridge source (#2546) by @davidcroda
- Add max tries to Spotlight (#2571) by @hazAT
- Handle `os.path.devnull` access issues (#2579) by @sentrivana
- Change `code.filepath` frame picking logic (#2568) by @sentrivana
- Trigger AWS Lambda tests on label (#2538) by @sentrivana
- Run permissions step on pull_request_target but not push (#2548) by @sentrivana
- Hash AWS Lambda test functions based on current revision (#2557) by @sentrivana
- Update Django version in tests (#2562) by @sentrivana
- Make metrics tests non-flaky (#2572) by @antonpirker

## 1.38.0

### Various fixes & improvements

- Only add trace context to checkins and do not run `event_processors` for checkins (#2536) by @antonpirker
- Metric span summaries (#2522) by @mitsuhiko
- Add source context to code locations (#2539) by @jan-auer
- Use in-app filepath instead of absolute path (#2541) by @antonpirker
- Switch to `jinja2` for generating CI yamls (#2534) by @sentrivana

## 1.37.1

### Various fixes & improvements

- Fix `NameError` on `parse_version` with eventlet (#2532) by @sentrivana
- build(deps): bump checkouts/data-schemas from `68def1e` to `e9f7d58` (#2501) by @dependabot

## 1.37.0

### Various fixes & improvements

- Move installed modules code to utils (#2429) by @sentrivana

  Note: We moved the internal function `_get_installed_modules` from `sentry_sdk.integrations.modules` to `sentry_sdk.utils`. So if you use this function you have to update your imports.

- Add code locations for metrics (#2526) by @jan-auer
- Add query source to DB spans (#2521) by @antonpirker
- Send events to Spotlight sidecar (#2524) by @HazAT
- Run integration tests with newest `pytest` (#2518) by @sentrivana
- Bring tests up to date (#2512) by @sentrivana
- Fix: Prevent global var from being discarded at shutdown (#2530) by @antonpirker
- Fix: Scope transaction source not being updated in scope.span setter (#2519) by @sl0thentr0py

## 1.36.0

### Various fixes & improvements

- Django: Support Django 5.0 (#2490) by @sentrivana
- Django: Handling ASGI body in the right way. (#2513) by @antonpirker
- Flask: Test with Flask 3.0 (#2506) by @sentrivana
- Celery: Do not create a span when task is triggered by Celery Beat (#2510) by @antonpirker
- Redis: Ensure `RedisIntegration` is disabled, unless `redis` is installed (#2504) by @szokeasaurusrex
- Quart: Fix Quart integration for Quart 0.19.4 (#2516) by @antonpirker
- gRPC: Make async gRPC less noisy (#2507) by @jyggen

## 1.35.0

### Various fixes & improvements

- **Updated gRPC integration:** Asyncio interceptors and easier setup (#2369) by @fdellekart

  Our gRPC integration now instruments incoming unary-unary grpc requests and outgoing unary-unary, unary-stream grpc requests using grpcio channels. Everything works now for sync and async code.

  Before this release you had to add Sentry interceptors by hand to your gRPC code; now the only thing you need to do is add the `GRPCIntegration` to your `sentry_sdk.init()` call. (See [documentation](https://docs.sentry.io/platforms/python/integrations/grpc/) for more information):

  ```python
  import sentry_sdk
  from sentry_sdk.integrations.grpc import GRPCIntegration

  sentry_sdk.init(
      dsn="___PUBLIC_DSN___",
      enable_tracing=True,
      integrations=[
          GRPCIntegration(),
      ],
  )
  ```

  The old way still works, but we strongly encourage you to update your code to the way described above.

- Python 3.12: Replace deprecated datetime functions (#2502) by @sentrivana
- Metrics: Unify datetime format (#2409) by @mitsuhiko
- Celery: Set correct data in `check_in`s (#2500) by @antonpirker
- Celery: Read timezone for Crons monitors from `celery_schedule` if existing (#2497) by @antonpirker
- Django: Removing redundant code in Django tests (#2491) by @vagi8
- Django: Make reading the request body work in Django ASGI apps. (#2495) by @antonpirker
- FastAPI: Use wraps on fastapi request call wrapper (#2476) by @nkaras
- Fix: Probe for psycopg2 and psycopg3 parameters function. (#2492) by @antonpirker
- Fix: Remove unnecessary TYPE_CHECKING alias (#2467) by @rafrafek

## 1.34.0

### Various fixes & improvements

- Added Python 3.12 support (#2471, #2483)
- Handle missing `connection_kwargs` in `patch_redis_client` (#2482) by @szokeasaurusrex
- Run common test suite on Python 3.12 (#2479) by @sentrivana

## 1.33.1

### Various fixes & improvements

- Make parse_version work in utils.py itself. (#2474) by @antonpirker

## 1.33.0

### Various fixes & improvements

- New: Added `error_sampler` option (#2456) by @szokeasaurusrex
- Python 3.12: Detect interpreter in shutdown state on thread spawn (#2468) by @mitsuhiko
- Patch eventlet under Sentry SDK (#2464) by @szokeasaurusrex
- Mitigate CPU spikes when sending lots of events with lots of data (#2449) by @antonpirker
- Make `debug` option also configurable via environment (#2450) by @antonpirker
- Make sure `get_dsn_parameters` is an actual function (#2441) by @sentrivana
- Bump pytest-localserver, add compat comment (#2448) by @sentrivana
- AWS Lambda: Update compatible runtimes for AWS Lambda layer (#2453) by @antonpirker
- AWS Lambda: Load AWS Lambda secrets in Github CI (#2153) by @antonpirker
- Redis: Connection attributes in `redis` database spans (#2398) by @antonpirker
- Falcon: Falcon integration checks response status before reporting error (#2465) by @szokeasaurusrex
- Quart: Support Quart 0.19 onwards (#2403) by @pgjones
- Sanic: Sanic integration initial version (#2419) by @szokeasaurusrex
- Django: Fix parsing of Django `path` patterns (#2452) by @sentrivana
- Django: Add Django 4.2 to test suite (#2462) by @sentrivana
- Polish changelog (#2434) by @sentrivana
- Update CONTRIBUTING.md (#2443) by @krishvsoni
- Update README.md (#2435) by @sentrivana

## 1.32.0

### Various fixes & improvements

- **New:** Error monitoring for some of the most popular Python GraphQL libraries:
  - Add [GQL GraphQL integration](https://docs.sentry.io/platforms/python/integrations/gql/) (#2368) by @szokeasaurusrex

    Usage:

    ```python
    import sentry_sdk
    from sentry_sdk.integrations.gql import GQLIntegration

    sentry_sdk.init(
        dsn='___PUBLIC_DSN___',
        integrations=[
            GQLIntegration(),
        ],
    )
    ```

  - Add [Graphene GraphQL error integration](https://docs.sentry.io/platforms/python/integrations/graphene/) (#2389) by @sentrivana

    Usage:

    ```python
    import sentry_sdk
    from sentry_sdk.integrations.graphene import GrapheneIntegration

    sentry_sdk.init(
        dsn='___PUBLIC_DSN___',
        integrations=[
            GrapheneIntegration(),
        ],
    )
    ```

  - Add [Strawberry GraphQL error & tracing integration](https://docs.sentry.io/platforms/python/integrations/strawberry/) (#2393) by @sentrivana

    Usage:

    ```python
    import sentry_sdk
    from sentry_sdk.integrations.strawberry import StrawberryIntegration

    sentry_sdk.init(
        dsn='___PUBLIC_DSN___',
        integrations=[
            # make sure to set async_execution to False if you're executing
            # GraphQL queries synchronously
            StrawberryIntegration(async_execution=True),
        ],
        traces_sample_rate=1.0,
    )
    ```

  - Add [Ariadne GraphQL error integration](https://docs.sentry.io/platforms/python/integrations/ariadne/) (#2387) by @sentrivana

    Usage:

    ```python
    import sentry_sdk
    from sentry_sdk.integrations.ariadne import AriadneIntegration

    sentry_sdk.init(
        dsn='___PUBLIC_DSN___',
        integrations=[
            AriadneIntegration(),
        ],
    )
    ```

- Capture multiple named groups again (#2432) by @sentrivana
- Don't fail when upstream scheme is unusual (#2371) by @vanschelven
- Support new RQ version (#2405) by @antonpirker
- Remove `utcnow`, `utcfromtimestamp` deprecated in Python 3.12 (#2415) by @rmad17
- Add `trace` to `__all__` in top-level `__init__.py` (#2401) by @lobsterkatie
- Move minimetrics code to the SDK (#2385) by @mitsuhiko
- Add configurable compression levels (#2382) by @mitsuhiko
- Shift flushing by up to a rollup window (#2396) by @mitsuhiko
- Make a consistent noop flush behavior (#2428) by @mitsuhiko
- Stronger recursion protection (#2426) by @mitsuhiko
- Remove `OpenTelemetryIntegration` from `__init__.py` (#2379) by @sentrivana
- Update API docs (#2397) by @antonpirker
- Pin some test requirements because new majors break our tests (#2404) by @antonpirker
- Run more `requests`, `celery`, `falcon` tests (#2414) by @sentrivana
- Move `importorskip`s in tests to `__init__.py` files (#2412) by @sentrivana
- Fix `mypy` errors (#2433) by @sentrivana
- Fix pre-commit issues (#2424) by @bukzor-sentryio
- Update [CONTRIBUTING.md](https://github.com/getsentry/sentry-python/blob/master/CONTRIBUTING.md) (#2411) by @sentrivana
- Bump `sphinx` from 7.2.5 to 7.2.6 (#2378) by @dependabot
- [Experimental] Add explain plan to DB spans (#2315) by @antonpirker

## 1.31.0

### Various fixes & improvements

- **New:** Add integration for `clickhouse-driver` (#2167) by @mimre25

  For more information, see the documentation for [clickhouse-driver](https://docs.sentry.io/platforms/python/configuration/integrations/clickhouse-driver).

  Usage:

  ```python
  import sentry_sdk
  from sentry_sdk.integrations.clickhouse_driver import ClickhouseDriverIntegration

  sentry_sdk.init(
      dsn='___PUBLIC_DSN___',
      integrations=[
          ClickhouseDriverIntegration(),
      ],
  )
  ```

- **New:** Add integration for `asyncpg` (#2314) by @mimre25

  For more information, see the documentation for [asyncpg](https://docs.sentry.io/platforms/python/configuration/integrations/asyncpg/).

  Usage:

  ```python
  import sentry_sdk
  from sentry_sdk.integrations.asyncpg import AsyncPGIntegration

  sentry_sdk.init(
      dsn='___PUBLIC_DSN___',
      integrations=[
          AsyncPGIntegration(),
      ],
  )
  ```

- **New:** Allow to override `propagate_traces` in `Celery` per task (#2331) by @jan-auer

  For more information, see the documentation for [Celery](https://docs.sentry.io//platforms/python/guides/celery/#distributed-traces).

  Usage:

  ```python
  import sentry_sdk
  from sentry_sdk.integrations.celery import CeleryIntegration

  # Enable global distributed traces (this is the default, just to be explicit.)
  sentry_sdk.init(
      dsn='___PUBLIC_DSN___',
      integrations=[
          CeleryIntegration(propagate_traces=True),
      ],
  )

  ...

  # This will NOT propagate the trace. (The task will start its own trace):
  my_task_b.apply_async(
      args=("some_parameter", ),
      headers={"sentry-propagate-traces": False},
  )
  ```

- Prevent Falcon integration from breaking ASGI apps (#2359) by @szokeasaurusrex
- Backpressure: only downsample a max of 10 times (#2347) by @sl0thentr0py
- Made NoOpSpan compatible to Transactions. (#2364) by @antonpirker
- Cleanup ASGI integration (#2335) by @antonpirker
- Pin anyio in tests (dep of httpx), because new major 4.0.0 breaks tests. (#2336) by @antonpirker
- Added link to backpressure section in docs. (#2354) by @antonpirker
- Add .vscode to .gitignore (#2317) by @shoaib-mohd
- Documenting Spans and Transactions (#2358) by @antonpirker
- Fix in profiler: do not call getcwd from module root (#2329) by @Zylphrex
- Fix deprecated version attribute (#2338) by @vagi8
- Fix transaction name in Starlette and FastAPI (#2341) by @antonpirker
- Fix tests using Postgres (#2362) by @antonpirker
- build(deps): Updated linting tooling (#2350) by @antonpirker
- build(deps): bump sphinx from 7.2.4 to 7.2.5 (#2344) by @dependabot
- build(deps): bump actions/checkout from 2 to 4 (#2352) by @dependabot
- build(deps): bump checkouts/data-schemas from `ebc77d3` to `68def1e` (#2351) by @dependabot

## 1.30.0

### Various fixes & improvements

- Officially support Python 3.11 (#2300) by @sentrivana
- Context manager monitor (#2290) by @szokeasaurusrex
- Set response status code in transaction `response` context. (#2312) by @antonpirker
- Add missing context kwarg to `_sentry_task_factory` (#2267) by @JohnnyDeuss
- In Postgres take the connection params from the connection (#2308) by @antonpirker
- Experimental: Allow using OTel for performance instrumentation (#2272) by @sentrivana

  This release includes experimental support for replacing Sentry's default performance monitoring solution with one powered by OpenTelemetry, without having to do any manual setup.

  Try it out by installing `pip install sentry-sdk[opentelemetry-experimental]` and then initializing the SDK with:

  ```python
  sentry_sdk.init(
      # ...your usual options...
      _experiments={"otel_powered_performance": True},
  )
  ```

  This enables OpenTelemetry performance monitoring support for some of the most popular frameworks and libraries (Flask, Django, FastAPI, requests...).

  We're looking forward to your feedback! Please let us know about your experience in this discussion: https://github.com/getsentry/sentry/discussions/55023

  **Important note:** Please note that this feature is experimental and in a proof-of-concept stage and is not meant for production use. It may be changed or removed at any point.

- Enable backpressure handling by default (#2298) by @sl0thentr0py

  The SDK now dynamically downsamples transactions to reduce backpressure in high throughput systems. It starts a new `Monitor` thread that performs health checks in 10 second intervals; while the system is unhealthy, the transaction sample rate is halved each interval, until the system is healthy again.

  To disable this behavior, use:

  ```python
  sentry_sdk.init(
      # ...your usual options...
      enable_backpressure_handling=False,
  )
  ```

  If your system serves heavy load, please let us know how this feature works for you!

  Check out the [documentation](https://docs.sentry.io/platforms/python/configuration/options/#enable-backpressure-handling) for more information.
- Stop recording spans for internal web requests to Sentry (#2297) by @szokeasaurusrex
- Add test for `ThreadPoolExecutor` (#2259) by @gggritso
- Add docstrings for `Scope.update_from_*` (#2311) by @sentrivana
- Moved `is_sentry_url` to utils (#2304) by @szokeasaurusrex
- Fix: arq attribute error on settings, support worker args (#2260) by @rossmacarthur
- Fix: Exceptions include detail property for their value (#2193) by @nicolassanmar
- build(deps): bump mypy from 1.4.1 to 1.5.1 (#2319) by @dependabot
- build(deps): bump sphinx from 7.1.2 to 7.2.4 (#2322) by @dependabot
- build(deps): bump sphinx from 7.0.1 to 7.1.2 (#2296) by @dependabot
- build(deps): bump checkouts/data-schemas from `1b85152` to `ebc77d3` (#2254) by @dependabot

## 1.29.2

### Various fixes & improvements

- Revert GraphQL integration (#2287) by @sentrivana

## 1.29.1

### Various fixes & improvements

- Fix GraphQL integration swallowing responses (#2286) by @sentrivana
- Fix typo (#2283) by @sentrivana

## 1.29.0

### Various fixes & improvements

- Capture GraphQL client errors (#2243) by @sentrivana
  - The SDK will now create dedicated errors whenever an HTTP client makes a request to a `/graphql` endpoint and the response contains an error. You can opt out of this by providing `capture_graphql_errors=False` to the HTTP client integration.
- Read MAX_VALUE_LENGTH from client options (#2121) (#2171) by @puittenbroek
- Rename `request_bodies` to `max_request_body_size` (#2247) by @mgaligniana
- Always sample checkin regardless of `sample_rate` (#2279) by @szokeasaurusrex
- Add information to short-interval cron error message (#2246) by @lobsterkatie
- Add DB connection attributes in spans (#2274) by @antonpirker
- Add `db.system` to remaining Redis spans (#2271) by @AbhiPrasad
- Clarified the procedure for running tests (#2276) by @szokeasaurusrex
- Fix Chalice tests (#2278) by @sentrivana
- Bump Black from 23.3.0 to 23.7.0 (#2256) by @dependabot
- Remove py3.4 from tox.ini (#2248) by @sentrivana

## 1.28.1

### Various fixes & improvements

- Redis: Add support for redis.asyncio (#1933) by @Zhenay
- Make sure each task that is started by Celery Beat has its own trace. (#2249) by @antonpirker
- Add Sampling Decision to Trace Envelope Header (#2239) by @antonpirker
- Do not add trace headers (`sentry-trace` and `baggage`) to HTTP requests to Sentry (#2240) by @antonpirker
- Prevent adding `sentry-trace` header multiple times (#2235) by @antonpirker
- Skip distributions with incomplete metadata (#2231) by @rominf
- Remove stale.yml (#2245) by @hubertdeng123
- Django: Fix 404 handler being labeled as "generic ASGI request" (#1277) by @BeryJu

## 1.28.0

### Various fixes & improvements

- Add support for cron jobs in ARQ integration (#2088) by @lewazo
- Backpressure handling prototype (#2189) by @sl0thentr0py
- Add "replay" context to event payload (#2234) by @antonpirker
- Update test Django app to be compatible for Django 4.x (#1794) by @DilLip-Chowdary-Codes

## 1.27.1

### Various fixes & improvements

- Add Starlette/FastAPI template tag for adding Sentry tracing information (#2225) by @antonpirker
  - By adding `{{ sentry_trace_meta }}` to your Starlette/FastAPI Jinja2 templates, we will include Sentry trace information as a meta tag in the rendered HTML to allow your frontend to pick up and continue the trace started in the backend.
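  A minimal sketch of a setup where the tag gets rendered, assuming FastAPI with `Jinja2Templates` (the app layout and template name are illustrative):

  ```python
  from fastapi import FastAPI, Request
  from fastapi.templating import Jinja2Templates

  import sentry_sdk

  sentry_sdk.init(dsn="___PUBLIC_DSN___", traces_sample_rate=1.0)

  app = FastAPI()
  # templates/index.html includes {{ sentry_trace_meta }} inside its <head>
  templates = Jinja2Templates(directory="templates")

  @app.get("/")
  async def index(request: Request):
      return templates.TemplateResponse("index.html", {"request": request})
  ```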
- Fixed generation of baggage when a DSC is already in propagation context (#2232) by @antonpirker
- Handle explicitly passing `None` for `trace_configs` in `aiohttp` (#2230) by @Harmon758
- Support newest Starlette versions (#2227) by @antonpirker

## 1.27.0

### Various fixes & improvements

- Support for SQLAlchemy 2.0 (#2200) by @antonpirker
- Add instrumentation of `aiohttp` client requests (#1761) by @md384
- Add Django template tag for adding Sentry tracing information (#2222) by @antonpirker
  - By adding `{{ sentry_trace_meta }}` to your Django templates, we will include Sentry trace information as a meta tag in the rendered HTML to allow your frontend to pick up and continue the trace started in the backend.
- Update Flask HTML meta helper (#2203) by @antonpirker
- Take trace ID always from propagation context (#2209) by @antonpirker
- Fix trace context in event payload (#2205) by @antonpirker
- Use new top level API in `trace_propagation_meta` (#2202) by @antonpirker
- Do not overwrite existing baggage on outgoing requests (#2191, #2214) by @sentrivana
- Set the transaction/span status from an OTel span (#2115) by @daniil-konovalenko
- Fix propagation of OTel `NonRecordingSpan` (#2187) by @hartungstenio
- Fix `TaskLockedException` handling in Huey integration (#2206) by @Zhenay
- Add message format configuration arguments to Loguru integration (#2208) by @Gwill
- Profiling: Add client reports for profiles (#2207) by @Zylphrex
- CI: Fix CI (#2220) by @antonpirker
- Dependencies: Bump `checkouts/data-schemas` from `7fdde87` to `1b85152` (#2218) by @dependabot
- Dependencies: Bump `mypy` from 1.3.0 to 1.4.1 (#2194) by @dependabot
- Docs: Change API doc theme (#2210) by @sentrivana
- Docs: Allow (some) autocompletion for top-level API (#2213) by @sentrivana
- Docs: Revert autocomplete hack (#2224) by @sentrivana

## 1.26.0

### Various fixes & improvements

- Tracing without performance (#2136) by @antonpirker
- Load tracing information from environment (#2176) by @antonpirker
- Auto-enable HTTPX integration if HTTPX installed (#2177) by @sentrivana
- Support for SOCKS proxies (#1050) by @Roguelazer
- Wrap `parse_url` calls in `capture_internal_exceptions` (#2162) by @sentrivana
- Run 2.7 tests in CI again (#2181) by @sentrivana
- Crons: Do not support sub-minute cron intervals (#2172) by @antonpirker
- Profile: Add function name to profiler frame cache (#2164) by @Zylphrex
- Dependencies: bump checkouts/data-schemas from `0ed3357` to `7fdde87` (#2165) by @dependabot
- Update changelog (#2163) by @sentrivana

## 1.25.1

### Django update (ongoing)

Collections of improvements to our Django integration.

By: @mgaligniana (#1773)

### Various fixes & improvements

- Fix `parse_url` (#2161) by @sentrivana and @antonpirker

  Our URL sanitization used in multiple integrations broke with the recent Python security update. If you started seeing `ValueError`s with `"'Filtered' does not appear to be an IPv4 or IPv6 address"`, this release fixes that. See [the original issue](https://github.com/getsentry/sentry-python/issues/2160) for more context.

- Better version parsing in integrations (#2152) by @antonpirker

  We now properly support all integration versions that conform to [PEP 440](https://peps.python.org/pep-0440/). This replaces our naïve version parsing that wouldn't accept versions such as `2.0.0rc1` or `2.0.5.post1`.
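  As a quick illustration, versions like these now parse (a sketch using the SDK-internal `parse_version` helper from `sentry_sdk.utils`; the exact return values shown are assumptions):

  ```python
  from sentry_sdk.utils import parse_version

  # Pre- and post-releases are reduced to their release segment instead of
  # failing to parse (expected values shown for illustration).
  print(parse_version("2.0.0rc1"))       # (2, 0, 0)
  print(parse_version("2.0.5.post1"))    # (2, 0, 5)
  print(parse_version("not a version"))  # None
  ```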
- Align HTTP status code as span data field `http.response.status_code` (#2113) by @antonpirker
- Do not encode cached value to determine size (#2143) by @sentrivana
- Fix using `unittest.mock` whenever available (#1926) by @mgorny
- Fix 2.7 `common` tests (#2145) by @sentrivana
- Bump `actions/stale` from `6` to `8` (#1978) by @dependabot
- Bump `black` from `22.12.0` to `23.3.0` (#1984) by @dependabot
- Bump `mypy` from `1.2.0` to `1.3.0` (#2110) by @dependabot
- Bump `sphinx` from `5.3.0` to `7.0.1` (#2112) by @dependabot

## 1.25.0

### Various fixes & improvements

- Support urllib3>=2.0.0 (#2148) by @asottile-sentry

  We're now supporting urllib3's new major version, 2.0.0. If you encounter issues (e.g. some of your dependencies not supporting the new urllib3 version yet) you might consider pinning the urllib3 version to `<2.0.0` manually in your project. Check out [the urllib3 migration guide](https://urllib3.readthedocs.io/en/latest/v2-migration-guide.html#migrating-as-an-application-developer) for details.

- Auto-retry tests on failure (#2134) by @sentrivana
- Correct `importlib.metadata` check in `test_modules` (#2149) by @asottile-sentry
- Fix distribution name normalization (PEP-0503) (#2144) by @rominf
- Fix `functions_to_trace` typing (#2141) by @rcmarron

## 1.24.0

### Various fixes & improvements

- **New:** Celery Beat exclude tasks option (#2130) by @antonpirker

  You can exclude Celery Beat tasks from being auto-instrumented. To do this, add a list of tasks you want to exclude as option `exclude_beat_tasks` when creating `CeleryIntegration`. The list can contain simple strings with the full task name, as specified in the Celery Beat schedule, or regular expressions to match multiple tasks.

  For more information, see the documentation for [Crons](https://docs.sentry.io/platforms/python/guides/celery/crons/).

  Usage:

  ```python
  exclude_beat_tasks = [
      "some-task-a",
      "payment-check-.*",
  ]
  sentry_sdk.init(
      dsn='___PUBLIC_DSN___',
      integrations=[
          CeleryIntegration(
              monitor_beat_tasks=True,
              exclude_beat_tasks=exclude_beat_tasks,
          ),
      ],
  )
  ```

  In this example the task `some-task-a` and all tasks with a name starting with `payment-check-` will be ignored.

- **New:** Add support for **ExceptionGroups** (#2025) by @antonpirker

  _Note:_ If running Self-Hosted Sentry, you should wait to adopt this SDK update until after updating to the 23.6.0 (est. June 2023) release of Sentry. Updating early will not break anything, but you will not get the full benefit of the Exception Groups improvements to issue grouping that were added to the Sentry backend.

- Prefer `importlib.metadata` over `pkg_resources` if available (#2081) by @sentrivana
- Work with a copy of request, vars in the event (#2125) by @sentrivana
- Pinned version of dependency that broke the build (#2133) by @antonpirker

## 1.23.1

### Various fixes & improvements

- Disable Django Cache spans by default. (#2120) by @antonpirker

## 1.23.0

### Various fixes & improvements

- **New:** Add `loguru` integration (#1994) by @PerchunPak

  Check [the documentation](https://docs.sentry.io/platforms/python/configuration/integrations/loguru/) for more information.
  Usage:

  ```python
  from loguru import logger
  import sentry_sdk
  from sentry_sdk.integrations.loguru import LoguruIntegration

  sentry_sdk.init(
      dsn="___PUBLIC_DSN___",
      integrations=[
          LoguruIntegration(),
      ],
  )

  logger.debug("I am ignored")
  logger.info("I am a breadcrumb")
  logger.error("I am an event", extra=dict(bar=43))
  logger.exception("An exception happened")
  ```

  - An error event with the message `"I am an event"` will be created.
  - `"I am a breadcrumb"` will be attached as a breadcrumb to that event.
  - `bar` will end up in the `extra` attributes of that event.
  - `"An exception happened"` will send the current exception from `sys.exc_info()` with the stack trace to Sentry. If there's no exception, the current stack will be attached.
  - The debug message `"I am ignored"` will not be captured by Sentry. To capture it, set `level` to `DEBUG` or lower in `LoguruIntegration`.

- Do not truncate request body if `request_bodies` is `"always"` (#2092) by @sentrivana
- Fixed Celery headers for Beat auto-instrumentation (#2102) by @antonpirker
- Add `db.operation` to Redis and MongoDB spans (#2089) by @antonpirker
- Make sure we're importing `redis` the library (#2106) by @sentrivana
- Add `include_source_context` option (#2020) by @farhat-nawaz and @sentrivana
- Import `Markup` from `markupsafe` (#2047) by @rco-ableton
- Fix `__qualname__` missing attribute in asyncio integration (#2105) by @sl0thentr0py
- Remove relay extension from AWS Layer (#2068) by @sl0thentr0py
- Add a note about `pip freeze` to the bug template (#2103) by @sentrivana

## 1.22.2

### Various fixes & improvements

- Fix: Django caching spans when using keyword arguments (#2086) by @antonpirker
- Fix: Duration in Celery Beat tasks monitoring (#2087) by @antonpirker
- Fix: Docstrings of SPANDATA (#2084) by @antonpirker

## 1.22.1

### Various fixes & improvements

- Fix: Handle a list of keys (not just a single key) in Django cache spans (#2082) by @antonpirker

## 1.22.0

### Various fixes & improvements

- Add `cache.hit` and `cache.item_size` to Django (#2057) by @antonpirker

  _Note:_ This will add spans for all requests to the caches configured in Django. This will probably add some overhead to your server and also add multiple spans to your performance waterfall diagrams. If you do not want this, you can disable this feature in the DjangoIntegration:

  ```python
  sentry_sdk.init(
      dsn="...",
      integrations=[
          DjangoIntegration(cache_spans=False),
      ]
  )
  ```

- Use `http.method` instead of `method` (#2054) by @AbhiPrasad
- Handle non-int `exc.status_code` in Starlette (#2075) by @sentrivana
- Handle SQLAlchemy `engine.name` being bytes (#2074) by @sentrivana
- Fix `KeyError` in `capture_checkin` if SDK is not initialized (#2073) by @antonpirker
- Use `functools.wrap` for `ThreadingIntegration` patches to fix attributes (#2080) by @EpicWink
- Pin `urllib3` to <2.0.0 for now (#2069) by @sl0thentr0py

## 1.21.1

### Various fixes & improvements

- Do not send monitor_config when unset (#2058) by @evanpurkhiser
- Add `db.system` span data (#2040, #2042) by @antonpirker
- Fix memory leak in profiling (#2049) by @Zylphrex
- Fix crash loop when returning none in before_send (#2045) by @sentrivana

## 1.21.0

### Various fixes & improvements

- Better handling of redis span/breadcrumb data (#2033) by @antonpirker

  _Note:_ With this release we will limit the description of redis db spans and the data in breadcrumbs representing redis db operations to 1024 characters. This can lead to truncated data.
  If you do not want this, there is a new parameter `max_data_size` in `RedisIntegration`. You can set this to `None` to disable trimming.

  Example for **disabling** trimming of redis commands in spans or breadcrumbs:

  ```python
  sentry_sdk.init(
      integrations=[
          RedisIntegration(max_data_size=None),
      ]
  )
  ```

  Example for custom trim size of redis commands in spans or breadcrumbs:

  ```python
  sentry_sdk.init(
      integrations=[
          RedisIntegration(max_data_size=50),
      ]
  )
  ```

- Add `db.system` to redis and SQLAlchemy db spans (#2037, #2038, #2039) (#2037) by @AbhiPrasad
- Upgraded linting tooling (#2026) by @antonpirker
- Made code more resilient. (#2031) by @antonpirker

## 1.20.0

### Various fixes & improvements

- Send all events to /envelope endpoint when tracing is enabled (#2009) by @antonpirker

  _Note:_ If you're self-hosting Sentry 9, you need to stay in the previous version of the SDK or update your self-hosted to at least 20.6.0.

- Profiling: Remove profile context from SDK (#2013) by @Zylphrex
- Profiling: Additional performance improvements to the profiler (#1991) by @Zylphrex
- Fix: Celery Beat monitoring without restarting the Beat process (#2001) by @antonpirker
- Fix: Using the Codecov uploader instead of deprecated python package (#2011) by @antonpirker
- Fix: Support for Quart (#2003) by @antonpirker

## 1.19.1

### Various fixes & improvements

- Make auto monitoring beat update support Celery 4 and 5 (#1989) by @antonpirker

## 1.19.0

### Various fixes & improvements

- **New:** [Celery Beat](https://docs.celeryq.dev/en/stable/userguide/periodic-tasks.html) auto monitoring (#1967) by @antonpirker

  The CeleryIntegration can now also monitor your Celery Beat scheduled tasks automatically using the new [Crons](https://blog.sentry.io/2023/01/04/cron-job-monitoring-beta-because-scheduled-jobs-fail-too/) feature of Sentry.

  To learn more see our [Celery Beat Auto Discovery](https://docs.sentry.io/platforms/python/guides/celery/crons/) documentation.

  Usage:

  ```python
  from celery import Celery, signals
  from celery.schedules import crontab

  import sentry_sdk
  from sentry_sdk.integrations.celery import CeleryIntegration

  app = Celery('tasks', broker='...')
  app.conf.beat_schedule = {
      'set-in-beat-schedule': {
          'task': 'tasks.some_important_task',
          'schedule': crontab(...),
      },
  }

  @signals.celeryd_init.connect
  def init_sentry(**kwargs):
      sentry_sdk.init(
          dsn='...',
          integrations=[CeleryIntegration(monitor_beat_tasks=True)],  # 👈 here
          environment="local.dev.grace",
          release="v1.0",
      )
  ```

  This will auto detect all scheduled tasks in your `beat_schedule` and will monitor them with Sentry [Crons](https://blog.sentry.io/2023/01/04/cron-job-monitoring-beta-because-scheduled-jobs-fail-too/).

- **New:** [gRPC](https://grpc.io/) integration (#1911) by @hossein-raeisi

  The [gRPC](https://grpc.io/) integration instruments all incoming requests and outgoing unary-unary, unary-stream grpc requests using grpcio channels.

  To learn more see our [gRPC Integration](https://docs.sentry.io/platforms/python/configuration/integrations/grpc/) documentation.
  On the server:

  ```python
  import grpc
  from sentry_sdk.integrations.grpc.server import ServerInterceptor

  server = grpc.server(
      thread_pool=...,
      interceptors=[ServerInterceptor()],
  )
  ```

  On the client:

  ```python
  import grpc
  from sentry_sdk.integrations.grpc.client import ClientInterceptor

  with grpc.insecure_channel("example.com:12345") as channel:
      channel = grpc.intercept_channel(channel, *[ClientInterceptor()])
  ```

- **New:** socket integration (#1911) by @hossein-raeisi

  Use this integration to create spans for DNS resolves (`socket.getaddrinfo()`) and connection creations (`socket.create_connection()`).

  To learn more see our [Socket Integration](https://docs.sentry.io/platforms/python/configuration/integrations/socket/) documentation.

  Usage:

  ```python
  import sentry_sdk
  from sentry_sdk.integrations.socket import SocketIntegration

  sentry_sdk.init(
      dsn="___PUBLIC_DSN___",
      integrations=[
          SocketIntegration(),
      ],
  )
  ```

- Fix: Do not trim span descriptions. (#1983) by @antonpirker

## 1.18.0

### Various fixes & improvements

- **New:** Implement `EventScrubber` (#1943) by @sl0thentr0py

  To learn more see our [Scrubbing Sensitive Data](https://docs.sentry.io/platforms/python/data-management/sensitive-data/#event-scrubber) documentation.

  Add a new `EventScrubber` class that scrubs certain potentially sensitive interfaces with a `DEFAULT_DENYLIST`. The default scrubber is automatically run if `send_default_pii = False`:

  ```python
  import sentry_sdk
  from sentry_sdk.scrubber import EventScrubber

  sentry_sdk.init(
      # ...
      send_default_pii=False,
      event_scrubber=EventScrubber(),  # this is set by default
  )
  ```

  You can also pass in a custom `denylist` to the `EventScrubber` class and filter additional fields that you want.

  ```python
  from sentry_sdk.scrubber import EventScrubber, DEFAULT_DENYLIST

  # custom denylist
  denylist = DEFAULT_DENYLIST + ["my_sensitive_var"]

  sentry_sdk.init(
      # ...
      send_default_pii=False,
      event_scrubber=EventScrubber(denylist=denylist),
  )
  ```

- **New:** Added new `functions_to_trace` option for central way of performance instrumentation (#1960) by @antonpirker

  To learn more see our [Tracing Options](https://docs.sentry.io/platforms/python/configuration/options/#functions-to-trace) documentation.

  An optional list of functions that should be set up for performance monitoring. For each function in the list, a span will be created when the function is executed.

  ```python
  functions_to_trace = [
      {"qualified_name": "tests.test_basics._hello_world_counter"},
      {"qualified_name": "time.sleep"},
      {"qualified_name": "collections.Counter.most_common"},
  ]

  sentry_sdk.init(
      # ...
      traces_sample_rate=1.0,
      functions_to_trace=functions_to_trace,
  )
  ```

- Updated denylist to include other widely used cookies/headers (#1972) by @antonpirker
- Forward all `sentry-` baggage items (#1970) by @cleptric
- Update OSS licensing (#1973) by @antonpirker
- Profiling: Handle non frame types in profiler (#1965) by @Zylphrex
- Tests: Bad arq dependency in tests (#1966) by @Zylphrex
- Better naming (#1962) by @antonpirker

## 1.17.0

### Various fixes & improvements

- **New:** Monitor Celery Beat tasks with Sentry [Cron Monitoring](https://docs.sentry.io/product/crons/).

  With this feature you can make sure that your Celery beat tasks run at the right time and see if they were successful or not.

  > **Warning**
  > Cron Monitoring is currently in beta. Beta features are still in-progress and may have bugs. We recognize the irony.
  > If you have any questions or feedback, please email us at crons-feedback@sentry.io, reach out via Discord (#cronjobs), or open an issue.

  Usage:

  ```python
  # File: tasks.py

  from celery import Celery, signals
  from celery.schedules import crontab

  import sentry_sdk
  from sentry_sdk.crons import monitor
  from sentry_sdk.integrations.celery import CeleryIntegration


  # 1. Setup your Celery beat configuration

  app = Celery('mytasks', broker='redis://localhost:6379/0')
  app.conf.beat_schedule = {
      'set-in-beat-schedule': {
          'task': 'tasks.tell_the_world',
          'schedule': crontab(hour='10', minute='15'),
          'args': ("in beat_schedule set", ),
      },
  }


  # 2. Initialize Sentry either in `celeryd_init` or `beat_init` signal.

  #@signals.celeryd_init.connect
  @signals.beat_init.connect
  def init_sentry(**kwargs):
      sentry_sdk.init(
          dsn='...',
          integrations=[CeleryIntegration()],
          environment="local.dev.grace",
          release="v1.0.7-a1",
      )


  # 3. Link your Celery task to a Sentry Cron Monitor

  @app.task
  @monitor(monitor_slug='3b861d62-ff82-4aa0-9cd6-b2b6403bd0cf')
  def tell_the_world(msg):
      print(msg)
  ```

- **New:** Add decorator for Sentry tracing (#1089) by @ynouri

  This allows you to use a decorator to set up custom performance instrumentation.

  To learn more see [Custom Instrumentation](https://docs.sentry.io/platforms/python/performance/instrumentation/custom-instrumentation/).

  Usage: Just add the new decorator to your function, and a span will be created for it:

  ```python
  import sentry_sdk

  @sentry_sdk.trace
  def my_complex_function():
      # do stuff
      ...
  ```

- Make Django signals tracing optional (#1929) by @antonpirker

  See the [Django Guide](https://docs.sentry.io/platforms/python/guides/django) to learn more.

- Deprecated `with_locals` in favor of `include_local_variables` (#1924) by @antonpirker
- Added top level API to get current span (#1954) by @antonpirker
- Profiling: Add profiler options to init (#1947) by @Zylphrex
- Profiling: Set active thread id for quart (#1830) by @Zylphrex
- Fix: Update `get_json` function call for werkzeug 2.1.0+ (#1939) by @michielderoos
- Fix: Returning the tasks result. (#1931) by @antonpirker
- Fix: Rename MYPY to TYPE_CHECKING (#1934) by @untitaker
- Fix: Fix type annotation for ignore_errors in sentry_sdk.init() (#1928) by @tiangolo
- Tests: Start a real http server instead of mocking libs (#1938) by @antonpirker

## 1.16.0

### Various fixes & improvements

- **New:** Add [arq](https://arq-docs.helpmanual.io/) Integration (#1872) by @Zhenay

  This integration will create performance spans when arq jobs are enqueued and when they are run. It will also capture errors in jobs and will link them to the performance spans.
  Usage:

  ```python
  import asyncio

  from httpx import AsyncClient
  from arq import create_pool
  from arq.connections import RedisSettings

  import sentry_sdk
  from sentry_sdk.integrations.arq import ArqIntegration
  from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT

  sentry_sdk.init(
      dsn="...",
      integrations=[ArqIntegration()],
  )

  async def download_content(ctx, url):
      session: AsyncClient = ctx['session']
      response = await session.get(url)
      print(f'{url}: {response.text:.80}...')
      return len(response.text)

  async def startup(ctx):
      ctx['session'] = AsyncClient()

  async def shutdown(ctx):
      await ctx['session'].aclose()

  async def main():
      with sentry_sdk.start_transaction(name="testing_arq_tasks", source=TRANSACTION_SOURCE_COMPONENT):
          redis = await create_pool(RedisSettings())
          for url in ('https://facebook.com', 'https://microsoft.com', 'https://github.com', "asdf"):
              await redis.enqueue_job('download_content', url)

  class WorkerSettings:
      functions = [download_content]
      on_startup = startup
      on_shutdown = shutdown

  if __name__ == '__main__':
      asyncio.run(main())
  ```

- Update of [Falcon](https://falconframework.org/) Integration (#1733) by @bartolootrit
- Adding [Cloud Resource Context](https://docs.sentry.io/platforms/python/configuration/integrations/cloudresourcecontext/) integration (#1882) by @antonpirker
- Profiling: Use the transaction timestamps to anchor the profile (#1898) by @Zylphrex
- Profiling: Add debug logs to profiling (#1883) by @Zylphrex
- Profiling: Start profiler thread lazily (#1903) by @Zylphrex
- Fixed checks for structured http data (#1905) by @antonpirker
- Make `set_measurement` public api and remove experimental status (#1909) by @sl0thentr0py
- Add `trace_propagation_targets` option (#1916) by @antonpirker
- Add `enable_tracing` to default traces_sample_rate to 1.0 (#1900) by @sl0thentr0py
- Remove deprecated `tracestate` (#1907) by @sl0thentr0py
- Sanitize URLs in Span description and breadcrumbs (#1876) by @antonpirker
- Mechanism should default to true unless set explicitly (#1889) by @sl0thentr0py
- Better setting of in-app in stack frames (#1894) by @antonpirker
- Add workflow to test gevent (#1870) by @Zylphrex
- Updated outdated HTTPX test matrix (#1917) by @antonpirker
- Switch to MIT license (#1908) by @cleptric

## 1.15.0

### Various fixes & improvements

- New: Add [Huey](https://huey.readthedocs.io/en/latest/) Integration (#1555) by @Zhenay

  This integration will create performance spans when Huey tasks are enqueued and when they are executed.
  Usage:

  Task definition in `demo.py`:

  ```python
  import time

  from huey import SqliteHuey, crontab

  import sentry_sdk
  from sentry_sdk.integrations.huey import HueyIntegration

  sentry_sdk.init(
      dsn="...",
      integrations=[
          HueyIntegration(),
      ],
      traces_sample_rate=1.0,
  )

  huey = SqliteHuey(filename='/tmp/demo.db')

  @huey.task()
  def add_numbers(a, b):
      return a + b
  ```

  Running the tasks in `run.py`:

  ```python
  from demo import add_numbers

  import sentry_sdk
  from sentry_sdk.integrations.huey import HueyIntegration
  from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT

  def main():
      sentry_sdk.init(
          dsn="...",
          integrations=[
              HueyIntegration(),
          ],
          traces_sample_rate=1.0,
      )

      with sentry_sdk.start_transaction(name="testing_huey_tasks", source=TRANSACTION_SOURCE_COMPONENT):
          r = add_numbers(1, 2)

  if __name__ == "__main__":
      main()
  ```

- Profiling: Do not send single sample profiles (#1879) by @Zylphrex
- Profiling: Add additional test coverage for profiler (#1877) by @Zylphrex
- Profiling: Always use builtin time.sleep (#1869) by @Zylphrex
- Profiling: Default in_app decision to None (#1855) by @Zylphrex
- Profiling: Remove use of threading.Event (#1864) by @Zylphrex
- Profiling: Enable profiling on all transactions (#1797) by @Zylphrex
- FastAPI: Fix check for Starlette in FastAPI integration (#1868) by @antonpirker
- Flask: Do not overwrite default for username with email address in FlaskIntegration (#1873) by @homeworkprod
- Tests: Add py3.11 to test-common (#1871) by @Zylphrex
- Fix: Don't log whole event in before_send / event_processor drops (#1863) by @sl0thentr0py

## 1.14.0

### Various fixes & improvements

- Add `before_send_transaction` (#1840) by @antonpirker

  Adds a hook (similar to `before_send`) that is called for all transaction events (performance related data).

  Usage:

  ```python
  import sentry_sdk

  def strip_sensitive_data(event, hint):
      # modify event here (or return `None` if you want to drop the event entirely)
      return event

  sentry_sdk.init(
      # ...
      before_send_transaction=strip_sensitive_data,
  )
  ```

  See also: https://docs.sentry.io/platforms/python/configuration/filtering/#using-platformidentifier-namebefore-send-transaction-

- Django: Always remove values of Django session related cookies. (#1842) by @antonpirker
- Profiling: Enable profiling for ASGI frameworks (#1824) by @Zylphrex
- Profiling: Better gevent support (#1822) by @Zylphrex
- Profiling: Add profile context to transaction (#1860) by @Zylphrex
- Profiling: Use co_qualname in python 3.11 (#1831) by @Zylphrex
- OpenTelemetry: fix Use dict for sentry-trace context instead of tuple (#1847) by @AbhiPrasad
- OpenTelemetry: fix extra dependency (#1825) by @bernardotorres
- OpenTelemetry: fix NoOpSpan updates scope (#1834) by @Zylphrex
- OpenTelemetry: Make sure to noop when there is no DSN (#1852) by @antonpirker
- FastAPI: Fix middleware being patched multiple times (#1841) by @JohnnyDeuss
- Starlette: Avoid import of pkg_resource with Starlette integration (#1836) by @mgu
- Removed code coverage target (#1862) by @antonpirker

## 1.13.0

### Various fixes & improvements

- Add Starlite integration (#1748) by @gazorby

  Adding support for the [Starlite](https://starlite-api.github.io/starlite/1.48/) framework. Unhandled errors are captured. Performance spans for Starlite middleware are also captured. Thanks @gazorby for the great work!
  Usage:

  ```python
  from starlite import Starlite, get

  import sentry_sdk
  from sentry_sdk.integrations.starlite import StarliteIntegration

  sentry_sdk.init(
      dsn="...",
      traces_sample_rate=1.0,
      integrations=[
          StarliteIntegration(),
      ],
  )

  @get("/")
  def hello_world() -> dict[str, str]:
      """Keeping the tradition alive with hello world."""
      bla = 1/0  # causing an error
      return {"hello": "world"}

  app = Starlite(route_handlers=[hello_world])
  ```

- Profiling: Remove sample buffer from profiler (#1791) by @Zylphrex
- Profiling: Performance tweaks to profile sampler (#1789) by @Zylphrex
- Add span for Django SimpleTemplateResponse rendering (#1818) by @chdsbd
- Use @wraps for Django Signal receivers (#1815) by @meanmail
- Add enqueued_at and started_at to rq job extra (#1024) by @kruvasyan
- Remove sanic v22 pin (#1819) by @sl0thentr0py
- Add support for `bytearray` and `memoryview` built-in types (#1833) by @Tarty
- Handle `"rc"` in SQLAlchemy version. (#1812) by @peterschutt
- Doc: Use .venv (not .env) as a virtual env location in CONTRIBUTING.md (#1790) by @tonyo
- Auto publish to internal pypi on release (#1823) by @asottile-sentry
- Added Python 3.11 to test suite (#1795) by @antonpirker
- Update test/linting dependencies (#1801) by @antonpirker
- Deps: bump sphinx from 5.2.3 to 5.3.0 (#1686) by @dependabot

## 1.12.1

### Various fixes & improvements

- Link errors to OTel spans (#1787) by @antonpirker

## 1.12.0

### Basic OTel support

This adds support to automatically integrate OpenTelemetry performance tracing with Sentry.

See the documentation on how to set it up: https://docs.sentry.io/platforms/python/performance/instrumentation/opentelemetry/

Give it a try and let us know if you have any feedback or problems with using it.

By: @antonpirker (#1772, #1766, #1765)

### Various fixes & improvements

- Tox Cleanup (#1749) by @antonpirker
- CI: Fix Github action checks (#1780) by @Zylphrex
- Profiling: Introduce active thread id on scope (#1764) by @Zylphrex
- Profiling: Eagerly hash stack for profiles (#1755) by @Zylphrex
- Profiling: Resolve inherited method class names (#1756) by @Zylphrex

## 1.11.1

### Various fixes & improvements

- Move set_transaction_name out of event processor in fastapi/starlette (#1751) by @sl0thentr0py
- Expose proxy_headers as top level config and use in ProxyManager: https://docs.sentry.io/platforms/python/configuration/options/#proxy-headers (#1746) by @sl0thentr0py

## 1.11.0

### Various fixes & improvements

- Fix signals problem on sentry.io (#1732) by @antonpirker
- Fix reading FastAPI request body twice. (#1724) by @antonpirker
- ref(profiling): Do not error if already setup (#1731) by @Zylphrex
- ref(profiling): Use sleep scheduler by default (#1729) by @Zylphrex
- feat(profiling): Extract more frame info (#1702) by @Zylphrex
- Update actions/upload-artifact to v3.1.1 (#1718) by @mattgauntseo-sentry
- Performance optimizations (#1725) by @antonpirker
- feat(pymongo): add PyMongo integration (#1590) by @Agalin
- Move relay to port 5333 to avoid collisions (#1716) by @sl0thentr0py
- fix(utils): strip_string() checks text length counting bytes not chars (#1711) by @mgaligniana
- chore: remove jira workflow (#1707) by @vladanpaunovic
- build(deps): bump checkouts/data-schemas from `a214fbc` to `20ff3b9` (#1703) by @dependabot
- perf(profiling): Tune the sample profile generation code for performance (#1694) by @Zylphrex

## 1.10.1

### Various fixes & improvements

- Bug fixes for FastAPI and Sentry SDK 1.10.0 (#1699) by @antonpirker
- The wrapped receive() did not return anything. (#1698) by @antonpirker
### Various fixes & improvements

- Tox Cleanup (#1749) by @antonpirker
- CI: Fix Github action checks (#1780) by @Zylphrex
- Profiling: Introduce active thread id on scope (#1764) by @Zylphrex
- Profiling: Eagerly hash stack for profiles (#1755) by @Zylphrex
- Profiling: Resolve inherited method class names (#1756) by @Zylphrex

## 1.11.1

### Various fixes & improvements

- Move set_transaction_name out of event processor in fastapi/starlette (#1751) by @sl0thentr0py
- Expose proxy_headers as top level config and use in ProxyManager: https://docs.sentry.io/platforms/python/configuration/options/#proxy-headers (#1746) by @sl0thentr0py

## 1.11.0

### Various fixes & improvements

- Fix signals problem on sentry.io (#1732) by @antonpirker
- Fix reading FastAPI request body twice. (#1724) by @antonpirker
- ref(profiling): Do not error if already setup (#1731) by @Zylphrex
- ref(profiling): Use sleep scheduler by default (#1729) by @Zylphrex
- feat(profiling): Extract more frame info (#1702) by @Zylphrex
- Update actions/upload-artifact to v3.1.1 (#1718) by @mattgauntseo-sentry
- Performance optimizations (#1725) by @antonpirker
- feat(pymongo): add PyMongo integration (#1590) by @Agalin
- Move relay to port 5333 to avoid collisions (#1716) by @sl0thentr0py
- fix(utils): strip_string() checks text length counting bytes not chars (#1711) by @mgaligniana
- chore: remove jira workflow (#1707) by @vladanpaunovic
- build(deps): bump checkouts/data-schemas from `a214fbc` to `20ff3b9` (#1703) by @dependabot
- perf(profiling): Tune the sample profile generation code for performance (#1694) by @Zylphrex

## 1.10.1

### Various fixes & improvements

- Bug fixes for FastAPI and Sentry SDK 1.10.0 (#1699) by @antonpirker
- The wrapped receive() did not return anything. (#1698) by @antonpirker

## 1.10.0

### Various fixes & improvements

- Unified naming for span ops (#1661) by @antonpirker

  We have unified the strings of our span operations. See https://develop.sentry.dev/sdk/performance/span-operations/

  **WARNING**: If you have Sentry Dashboards or Sentry Discover queries that use `transaction.op` in their fields, conditions, aggregates or columns this change could potentially break your Dashboards/Discover setup.

  Here is a list of the changes we made to the `op`s. Please adjust your dashboards and Discover queries accordingly:

  | Old operation (`op`)     | New Operation (`op`)   |
  | ------------------------ | ---------------------- |
  | `asgi.server`            | `http.server`          |
  | `aws.request`            | `http.client`          |
  | `aws.request.stream`     | `http.client.stream`   |
  | `celery.submit`          | `queue.submit.celery`  |
  | `celery.task`            | `queue.task.celery`    |
  | `django.middleware`      | `middleware.django`    |
  | `django.signals`         | `event.django`         |
  | `django.template.render` | `template.render`      |
  | `django.view`            | `view.render`          |
  | `http`                   | `http.client`          |
  | `redis`                  | `db.redis`             |
  | `rq.task`                | `queue.task.rq`        |
  | `serverless.function`    | `function.aws`         |
  | `serverless.function`    | `function.gcp`         |
  | `starlette.middleware`   | `middleware.starlette` |

- Include framework in SDK name (#1662) by @antonpirker
- Asyncio integration (#1671) by @antonpirker
- Add exception handling to Asyncio Integration (#1695) by @antonpirker
- Fix asyncio task factory (#1689) by @antonpirker
- Have instrumentation for ASGI middleware receive/send callbacks. (#1673) by @antonpirker
- Use Django internal ASGI handling from Channels version 4.0.0. (#1688) by @antonpirker
- fix(integrations): Fix http putrequest when url is None (#1693) by @MattFlower
- build(deps): bump checkouts/data-schemas from `f0a57f2` to `a214fbc` (#1627) by @dependabot
- build(deps): bump flake8-bugbear from 22.9.11 to 22.9.23 (#1637) by @dependabot
- build(deps): bump sphinx from 5.1.1 to 5.2.3 (#1653) by @dependabot
- build(deps): bump actions/stale from 5 to 6 (#1638) by @dependabot
- build(deps): bump black from 22.8.0 to 22.10.0 (#1670) by @dependabot
- Remove unused node setup from ci. (#1681) by @antonpirker
- Check for Decimal is in_valid_sample_rate (#1672) by @Arvind2222
- Add session for aiohttp integration (#1605) by @denys-pidlisnyi
- feat(profiling): Extract qualified name for each frame (#1669) by @Zylphrex
- feat(profiling): Attach thread metadata to profiles (#1660) by @Zylphrex
- ref(profiling): Rename profiling frame keys (#1680) by @Zylphrex
- fix(profiling): get_frame_name only look at arguments (#1684) by @Zylphrex
- fix(profiling): Need to sample profile correctly (#1679) by @Zylphrex
- fix(profiling): Race condition spawning multiple profiling threads (#1676) by @Zylphrex
- tests(profiling): Add basic profiling tests (#1677) by @Zylphrex
- tests(profiling): Add tests for thread schedulers (#1683) by @Zylphrex

## 1.9.10

### Various fixes & improvements

- Use content-length header in ASGI instead of reading request body (#1646, #1631, #1595, #1573) (#1649) by @antonpirker
- Added newer Celery versions to test suite (#1655) by @antonpirker
- Django 4.x support (#1632) by @antonpirker
- Cancel old CI runs when new one is started. (#1651) by @antonpirker
- Increase max string size for desc (#1647) by @k-fish
- Pin Sanic version for CI (#1650) by @antonpirker
- Fix for partial signals in old Django and old Python versions.
(#1641) by @antonpirker
- Convert profile output to the sample format (#1611) by @phacops
- Dynamically adjust profiler sleep time (#1634) by @Zylphrex

## 1.9.9

### Django update (ongoing)

- Instrument Django Signals so they show up in "Performance" view (#1526) by @BeryJu
- include other Django enhancements brought up by the community

### Various fixes & improvements

- fix(profiling): Profiler mode type hints (#1633) by @Zylphrex
- New ASGIMiddleware tests (#1600) by @antonpirker
- build(deps): bump mypy from 0.961 to 0.971 (#1517) by @dependabot
- build(deps): bump black from 22.3.0 to 22.8.0 (#1596) by @dependabot
- build(deps): bump sphinx from 5.0.2 to 5.1.1 (#1524) by @dependabot
- ref: upgrade linters to flake8 5.x (#1610) by @asottile-sentry
- feat(profiling): Introduce different profiler schedulers (#1616) by @Zylphrex
- fix(profiling): Check transaction sampled status before profiling (#1624) by @Zylphrex
- Wrap Baggage ser/deser in capture_internal_exceptions (#1630) by @sl0thentr0py
- Faster Tests (DjangoCon) (#1602) by @antonpirker
- feat(profiling): Add support for profiles_sample_rate (#1613) by @Zylphrex
- feat(profiling): Support for multithreaded profiles (#1570) by @Zylphrex

## 1.9.8

### Various fixes & improvements

- Baggage creation for head of trace (#1589) by @sl0thentr0py
  - The SDK now also generates new baggage entries for dynamic sampling when it is the first (head) SDK in the pipeline.

## 1.9.7

### Various fixes & improvements

- Let SentryAsgiMiddleware work with Starlette and FastAPI integrations (#1594) by @antonpirker

**Note:** The last version 1.9.6 introduced a breaking change where projects that used Starlette or FastAPI and had manually set up `SentryAsgiMiddleware` could not start. This version fixes this behaviour. With this version, if you have a manual `SentryAsgiMiddleware` setup and are using Starlette or FastAPI, everything just works out of the box.

Sorry for any inconveniences the last version might have brought to you.

We can do better and in the future we will do our best to not break your code again.
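For illustration, a minimal sketch of such a manual setup (route, handler, and DSN are placeholders):

```python
from starlette.applications import Starlette
from starlette.responses import PlainTextResponse
from starlette.routing import Route

import sentry_sdk
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware

sentry_sdk.init(dsn="...")

async def homepage(request):
    return PlainTextResponse("Hello, world!")

app = Starlette(routes=[Route("/", homepage)])

# Manual wrapping like this clashed with the auto-enabled Starlette/FastAPI
# integrations in 1.9.6; as of 1.9.7 both can coexist.
app = SentryAsgiMiddleware(app)
```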
## 1.9.6

### Various fixes & improvements

- Auto-enable Starlette and FastAPI (#1533) by @antonpirker
- Add more version constraints (#1574) by @isra17
- Fix typo in starlette attribute check (#1566) by @sl0thentr0py

## 1.9.5

### Various fixes & improvements

- fix(redis): import redis pipeline using full path (#1565) by @olksdr
- Fix side effects for parallel tests (#1554) by @sl0thentr0py

## 1.9.4

### Various fixes & improvements

- Remove TRANSACTION_SOURCE_UNKNOWN and default to CUSTOM (#1558) by @sl0thentr0py
- feat(redis): Add instrumentation for redis pipeline (#1543) by @jjbayer
- Handle no release when uploading profiles (#1548) by @szokeasaurusrex

## 1.9.3

### Various fixes & improvements

- Wrap StarletteRequestExtractor in capture_internal_exceptions (#1551) by @sl0thentr0py

## 1.9.2

### Various fixes & improvements

- chore: remove quotes (#1545) by @vladanpaunovic

## 1.9.1

### Various fixes & improvements

- Fix FastAPI issues (#1532) ( #1514) (#1532) by @antonpirker
- Add deprecation warning for 3.4, 3.5 (#1541) by @sl0thentr0py
- Fast tests (#1504) by @antonpirker
- Replace Travis CI badge with GitHub Actions badge (#1538) by @153957
- chore(deps): update urllib3 minimum version with environment markers (#1312) by @miketheman
- Update Flask and Quart integrations (#1520) by @pgjones
- chore: Remove ancient examples from tracing prototype (#1528) by @sl0thentr0py
- fix(django): Send correct "url" transaction source if Django resolver fails to resolve (#1525) by @sl0thentr0py

## 1.9.0

### Various fixes & improvements

- feat(profiler): Add experimental profiler under experiments.enable_profiling (#1481) by @szokeasaurusrex
- Fixed problem with broken response and python-multipart (#1516) by @antonpirker

## 1.8.0

### Various fixes & improvements

- feat(starlette): add Starlette integration (#1441) by @sl0thentr0py

  **Important:** Remove manual usage of `SentryAsgiMiddleware`! This is now done by the Starlette integration.

  Usage:

  ```python
  from starlette.applications import Starlette

  from sentry_sdk.integrations.starlette import StarletteIntegration

  sentry_sdk.init(
      dsn="...",
      integrations=[StarletteIntegration()],
  )

  app = Starlette(debug=True, routes=[...])
  ```

- feat(fastapi): add FastAPI integration (#829) by @antonpirker

  **Important:** Remove manual usage of `SentryAsgiMiddleware`! This is now done by the FastAPI integration.

  Usage:

  ```python
  from fastapi import FastAPI

  from sentry_sdk.integrations.starlette import StarletteIntegration
  from sentry_sdk.integrations.fastapi import FastApiIntegration

  sentry_sdk.init(
      dsn="...",
      integrations=[StarletteIntegration(), FastApiIntegration()],
  )

  app = FastAPI()
  ```

  Yes, you have to add both, the `StarletteIntegration` **AND** the `FastApiIntegration`!

- fix: avoid sending empty Baggage header (#1507) by @intgr
- fix: properly freeze Baggage object (#1508) by @intgr
- docs: fix simple typo, collecter | collector (#1505) by @timgates42

## 1.7.2

### Various fixes & improvements

- feat(transactions): Transaction Source (#1490) by @antonpirker
- Removed (unused) sentry_timestamp header (#1494) by @antonpirker

## 1.7.1

### Various fixes & improvements

- Skip malformed baggage items (#1491) by @robyoung

## 1.7.0

### Various fixes & improvements

- feat(tracing): Dynamic Sampling Context / Baggage continuation (#1485) by @sl0thentr0py

  The SDK now propagates the [W3C Baggage Header](https://www.w3.org/TR/baggage/) from incoming transactions to outgoing requests. It also extracts Sentry specific [sampling information](https://develop.sentry.dev/sdk/performance/dynamic-sampling-context/) and adds it to the transaction headers to enable Dynamic Sampling in the product.
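The built-in integrations handle this propagation automatically; for a server without an integration, a transaction can be continued from the incoming headers roughly like this (a sketch, with an illustrative handler name and placeholder transaction name/op):

```python
import sentry_sdk
from sentry_sdk.tracing import Transaction

def handle_request(headers):  # illustrative handler
    # Picks up the sentry-trace and baggage headers sent by the upstream
    # service; while the transaction is active, outgoing HTTP requests
    # carry them onward.
    transaction = Transaction.continue_from_headers(
        headers, name="handle_request", op="http.server"
    )
    with sentry_sdk.start_transaction(transaction):
        ...  # handle the request
```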
## 1.6.0

### Various fixes & improvements

- Fix Deployment (#1474) by @antonpirker
- Serverless V2 (#1450) by @antonpirker
- Use logging levelno instead of levelname. Levelnames can be overridden (#1449) by @rrauenza

## 1.5.12

### Various fixes & improvements

- feat(measurements): Add experimental set_measurement api on transaction (#1359) by @sl0thentr0py
- fix: Remove incorrect usage from flask helper example (#1434) by @BYK

## 1.5.11

### Various fixes & improvements

- chore: Bump mypy and fix abstract ContextManager typing (#1421) by @sl0thentr0py
- chore(issues): add link to Sentry support (#1420) by @vladanpaunovic
- fix: replace git.io links with redirect targets (#1412) by @asottile-sentry
- ref: Update error verbose for sentry init (#1361) by @targhs
- fix(sessions): Update session also for non sampled events and change filter order (#1394) by @adinauer

## 1.5.10

### Various fixes & improvements

- Remove Flask version constraint (#1395) by @antonpirker
- Change ordering of event drop mechanisms (#1390) by @adinauer

## 1.5.9

### Various fixes & improvements

- fix(sqlalchemy): Use context instead of connection in sqlalchemy integration (#1388) by @sl0thentr0py
- Update correct test command in contributing docs (#1377) by @targhs
- Update black (#1379) by @antonpirker
- build(deps): bump sphinx from 4.1.1 to 4.5.0 (#1376) by @dependabot
- fix: Auto-enabling Redis and Pyramid integration (#737) by @untitaker
- feat(testing): Add pytest-watch (#853) by @lobsterkatie
- Treat x-api-key header as sensitive (#1236) by @simonschmidt
- fix: Remove obsolete MAX_FORMAT_PARAM_LENGTH (#1375) by @blueyed

## 1.5.8

### Various fixes & improvements

- feat(asgi): Add support for setting transaction name to path in FastAPI (#1349) by @tiangolo
- fix(sqlalchemy): Change context manager type to avoid race in threads (#1368) by @Fofanko
- fix(perf): Fix transaction setter on scope to use containing_transaction to match with getter (#1366) by @sl0thentr0py
- chore(ci): Change stale GitHub workflow to run once a day (#1367) by @kamilogorek
- feat(django): Make django middleware expose more wrapped attributes (#1202) by @MattFisher

## 1.5.7

### Various fixes & improvements

- fix(serializer): Make sentry_repr dunder method to avoid mock problems (#1364) by @sl0thentr0py

## 1.5.6

### Various fixes & improvements

- Create feature.yml (#1350) by @vladanpaunovic
- Update contribution guide (#1346) by @antonpirker
- chore: add bug issue template (#1345) by @vladanpaunovic
- Added default value for auto_session_tracking (#1337) by @antonpirker
- docs(readme): reordered content (#1343) by @antonpirker
- fix(tests): Removed unsupported Django 1.6 from tests to avoid confusion (#1338) by @antonpirker
- Group captured warnings under separate issues (#1324) by @mnito
- build(changelogs): Use automated changelogs from Craft (#1340) by @BYK
- fix(aiohttp): AioHttpIntegration sentry_app_handle() now ignores ConnectionResetError (#1331) by @cmalek
- meta: Remove black GH action (#1339) by @sl0thentr0py
- feat(flask): Add `sentry_trace()` template helper (#1336) by @BYK

## 1.5.5

- Add session tracking to ASGI integration (#1329)
- Pinning test requirements versions (#1330)
- Allow classes to short circuit serializer with `sentry_repr` (#1322)
- Set default on json.dumps in compute_tracestate_value to ensure string
conversion (#1318)

Work in this release contributed by @tomchuk. Thank you for your contribution!

## 1.5.4

- Add Python 3.10 to test suite (#1309)
- Capture only 5xx HTTP errors in Falcon Integration (#1314)
- Attempt custom urlconf resolve in `got_request_exception` as well (#1317)

## 1.5.3

- Pick up custom urlconf set by Django middlewares from request if any (#1308)

## 1.5.2

- Record event_processor client reports #1281
- Add a Quart integration #1248
- Sanic v21.12 support #1292
- Support Celery abstract tasks #1287

Work in this release contributed by @johnzeringue, @pgjones and @ahopkins. Thank you for your contribution!

## 1.5.1

- Fix django legacy url resolver regex substitution due to upstream CVE-2021-44420 fix #1272
- Record lost `sample_rate` events only if tracing is enabled #1268
- Fix gevent version parsing for non-numeric parts #1243
- Record span and breadcrumb when Django opens db connection #1250

## 1.5.0

- Also record client outcomes for before send #1211
- Add support for implicitly sized envelope items #1229
- Fix integration with Apache Beam 2.32, 2.33 #1233
- Remove Python 2.7 support for AWS Lambda layers in craft config #1241
- Refactor Sanic integration for v21.9 support #1212
- AWS Lambda Python 3.9 runtime support #1239
- Fix "shutdown_timeout" typing #1256

Work in this release contributed by @galuszkak, @kianmeng, @ahopkins, @razumeiko, @tomscytale, and @seedofjoy. Thank you for your contribution!

## 1.4.3

- Turned client reports on by default.

## 1.4.2

- Made envelope modifications in the HTTP transport non observable #1206

## 1.4.1

- Fix race condition between `finish` and `start_child` in tracing #1203

## 1.4.0

- No longer set the last event id for transactions #1186
- Added support for client reports (disabled by default for now) #1181
- Added `tracestate` header handling #1179
- Added real ip detection to asgi integration #1199

## 1.3.1

- Fix detection of contextvars compatibility with Gevent versions >=20.9.0 #1157

## 1.3.0

- Add support for Sanic versions 20 and 21 #1146

## 1.2.0

- Fix for `AWSLambda` Integration to handle other path formats for function initial handler #1139
- Fix for worker to set daemon attribute instead of deprecated setDaemon method #1093
- Fix for `bottle` Integration that discards `-dev` for version extraction #1085
- Fix for transport that adds a unified hook for capturing metrics about dropped events #1100
- Add `Httpx` Integration #1119
- Add support for china domains in `AWSLambda` Integration #1051

## 1.1.0

- Fix for `AWSLambda` integration returns value of original handler #1106
- Fix for `RQ` integration that only captures exception if RQ job has failed and ignore retries #1076
- Feature that supports Tracing for the `Tornado` integration #1060
- Feature that supports wild cards in `ignore_logger` in the `Logging` Integration #1053
- Fix for django that deals with template span description names that are either lists or tuples #1054

## 1.0.0

This release contains a breaking change

- **BREAKING CHANGE**: Feat: Moved `auto_session_tracking` experimental flag to a proper option and removed explicitly setting experimental `session_mode` in favor of auto detecting its value, hence enabling release health by default #994
- Fixed Django transaction name by setting the name to `request.path_info` rather than `request.path`
- Fix for tracing by getting HTTP headers from span rather than transaction when possible #1035
- Fix for Flask transactions missing request body in non errored transactions #1034
- Fix for honoring the
`X-Forwarded-For` header #1037
- Fix for worker that logs data dropping of events with level error #1032

## 0.20.3

- Added scripts to support auto instrumentation of no code AWS lambda Python functions

## 0.20.2

- Fix incorrect regex in craft to include wheel file in pypi release

## 0.20.1

- Fix for error that occurs with Async Middlewares when the middleware is a function rather than a class

## 0.20.0

- Fix for header extraction for AWS lambda/API extraction
- Fix multiple \*\*kwargs type hints # 967
- Fix that corrects AWS lambda integration failure to detect the aws-lambda-ric 1.0 bootstrap #976
- Fix AWSLambda integration: variable "timeout_thread" referenced before assignment #977
- Use full git sha as release name #960
- **BREAKING CHANGE**: The default environment is now production, not based on release
- Django integration now creates transaction spans for template rendering
- Fix headers not parsed correctly in ASGI middleware, Decode headers before creating transaction #984
- Restored ability to have tracing disabled #991
- Fix Django async views not behaving asynchronously
- Performance improvement: supported pre-aggregated sessions

## 0.19.5

- Fix two regressions added in 0.19.2 with regard to sampling behavior when reading the sampling decision from headers.
- Increase internal transport queue size and make it configurable.

## 0.19.4

- Fix a bug that would make applications crash if an old version of `boto3` was installed.

## 0.19.3

- Automatically pass integration-relevant data to `traces_sampler` for AWS, AIOHTTP, ASGI, Bottle, Celery, Django, Falcon, Flask, GCP, Pyramid, Tryton, RQ, and WSGI integrations
- Fix a bug where the AWS integration would crash if event was anything besides a dictionary
- Fix the Django integration's ASGI handler for Channels 3.0. Thanks Luke Pomfrey!

## 0.19.2

- Add `traces_sampler` option.
- The SDK now attempts to infer a default release from various environment variables and the current git repo.
- Fix a crash with async views in Django 3.1.
- Fix a bug where complex URL patterns in Django would create malformed transaction names.
- Add options for transaction styling in AIOHTTP.
- Add basic attachment support (documentation tbd).
- Fix a crash in the `pure_eval` integration.
- Integration for creating spans from `boto3`.

## 0.19.1

- Fix dependency check for `blinker` fixes #858
- Fix incorrect timeout warnings in AWS Lambda and GCP integrations #854

## 0.19.0

- Removed `_experiments.auto_enabling_integrations` in favor of just `auto_enabling_integrations` which is now enabled by default.

## 0.18.0

- **Breaking change**: The `no_proxy` environment variable is now honored when inferring proxy settings from the system. Thanks Xavier Fernandez!
- Added Performance/Tracing support for AWS and GCP functions.
- Fix an issue with Django instrumentation where the SDK modified `resolver_match.callback` and broke user code.

## 0.17.8

- Fix yet another bug with disjoint traces in Celery.
- Added support for Chalice 1.20. Thanks again to the folks at Cuenca MX!

## 0.17.7

- Internal: Change data category for transaction envelopes.
- Fix a bug under Celery 4.2+ that may have caused disjoint traces or missing transactions.

## 0.17.6

- Support for Flask 0.10 (only relaxing version check)

## 0.17.5

- Work around an issue in the Python stdlib that makes the entire process deadlock during garbage collection if events are sent from a `__del__` implementation.
- Add possibility to wrap ASGI application twice in middleware to enable split up of request scope data and exception catching.

## 0.17.4

- New integration for the Chalice web framework for AWS Lambda. Thanks to the folks at Cuenca MX!

## 0.17.3

- Fix an issue with the `pure_eval` integration in interaction with trimming where `pure_eval` would create a lot of useless local variables that then drown out the useful ones in trimming.

## 0.17.2

- Fix timezone bugs in GCP integration.

## 0.17.1

- Fix timezone bugs in AWS Lambda integration.
- Fix crash on GCP integration because of missing parameter `timeout_warning`.

## 0.17.0

- Fix a bug where class-based callables used as Django views (without using Django's regular class-based views) would not have `csrf_exempt` applied.
- New integration for Google Cloud Functions.
- Fix a bug where a recently released version of `urllib3` would cause the SDK to enter an infinite loop on networking and SSL errors.
- **Breaking change**: Remove the `traceparent_v2` option. The option has been ignored since 0.16.3, just remove it from your code.

## 0.16.5

- Fix a bug that caused Django apps to crash if the view didn't have a `__name__` attribute.

## 0.16.4

- Add experiment to avoid truncating span descriptions. Initialize with `init(_experiments={"smart_transaction_trimming": True})`.
- Add a span around the Django view in transactions to distinguish its operations from middleware operations.

## 0.16.3

- Fix AWS Lambda support for Python 3.8.
- The AWS Lambda integration now captures initialization/import errors for Python 3.
- The AWS Lambda integration now supports an option to warn about functions likely to time out.
- Testing for RQ 1.5
- Flip default of `traceparent_v2`. This change should have zero impact. The flag will be removed in 0.17.
- Fix compatibility bug with Django 3.1.

## 0.16.2

- New (optional) integrations for richer stacktraces: `pure_eval` for additional variables, `executing` for better function names.

## 0.16.1

- Flask integration: Fix a bug that prevented custom tags from being attached to transactions.

## 0.16.0

- Redis integration: add tags for more commands
- Redis integration: Patch rediscluster package if installed.
- Session tracking: A session is no longer considered crashed if there has been a fatal log message (only unhandled exceptions count).
- **Breaking change**: Revamping of the tracing API.
- **Breaking change**: `before_send` is no longer called for transactions.

## 0.15.1

- Fix fatal crash in Pyramid integration on 404.

## 0.15.0

- **Breaking change:** The ASGI middleware will now raise an exception if contextvars are not available, like it is already the case for other asyncio integrations.
- Contextvars are now used in more circumstances following a bugfix release of `gevent`. This will fix a few instances of wrong request data being attached to events while using an asyncio-based web framework.
- APM: Fix a bug in the SQLAlchemy integration where a span was left open if the database transaction had to be rolled back. This could have led to deeply nested span trees under that db query span.
- Fix a bug in the Pyramid integration where the transaction name could not be overridden at all.
- Fix a broken type annotation on `capture_exception`.
- Basic support for Django 3.1. More work is required for async middlewares to be instrumented properly for APM.

## 0.14.4

- Fix bugs in transport rate limit enforcement for specific data categories.
The bug should not have affected anybody because we do not yet emit rate limits for specific event types/data categories.
- Fix a bug in `capture_event` where it would crash if given additional kwargs. Thanks to Tatiana Vasilevskaya!
- Fix a bug where contextvars from the request handler were inaccessible in AIOHTTP error handlers.
- Fix a bug where the Celery integration would crash if newrelic instrumented Celery as well.

## 0.14.3

- Attempt to use a monotonic clock to measure span durations in Performance/APM.
- Avoid overwriting explicitly set user data in web framework integrations.
- Allow to pass keyword arguments to `capture_event` instead of configuring the scope.
- Feature development for session tracking.

## 0.14.2

- Fix a crash in Django Channels instrumentation when SDK is reinitialized.
- More contextual data for AWS Lambda (cloudwatch logs link).

## 0.14.1

- Fix a crash in the Django integration when used in combination with Django Rest Framework's test utilities for request.
- Fix high memory consumption when sending a lot of errors in the same process. Particularly noticeable in async environments.

## 0.14.0

- Show ASGI request data in Django 3.0
- New integration for the Trytond ERP framework. Thanks n1ngu!

## 0.13.5

- Fix trace continuation bugs in APM.
- No longer report `asyncio.CancelledError` as part of AIOHTTP integration.

## 0.13.4

- Fix package classifiers to mark this package as supporting Python 3.8. The SDK supported 3.8 before though.
- Update schema sent for transaction events (transaction status).
- Fix a bug where `None` inside request data was skipped/omitted.

## 0.13.3

- Fix an issue with the ASGI middleware that would cause Uvicorn to infer the wrong ASGI versions and call the wrapped application with the wrong argument count.
- Do not ignore the `tornado.application` logger.
- The Redis integration now instruments Redis blaster for breadcrumbs and transaction spans.

## 0.13.2

- Fix a bug in APM that would cause wrong durations to be displayed on non-UTC servers.

## 0.13.1

- Add new global functions for setting scope/context data.
- Fix a bug that would make Django 1.11+ apps crash when using function-based middleware.

## 0.13.0

- Remove an old deprecation warning (the behavior itself already changed a long time ago).
- The AIOHTTP integration now attaches the request body to crash reports. Thanks to Vitali Rebkavets!
- Add an experimental PySpark integration.
- First release to be tested under Python 3.8. No code changes were necessary though, so previous releases also might have worked.

## 0.12.3

- Various performance improvements to event sending.
- Avoid crashes when scope or hub is racy.
- Revert a change that broke applications using gevent and channels (in the same virtualenv, but different processes).
- Fix a bug that made the SDK crash on unicode in SQL.

## 0.12.2

- Fix a crash with ASGI (Django Channels) when the ASGI request type is neither HTTP nor Websockets.

## 0.12.1

- Temporarily remove sending of SQL parameters (as part of breadcrumbs or spans for APM) to Sentry to avoid memory consumption issues.

## 0.12.0

- Sentry now has a [Discord server](https://discord.gg/cWnMQeA)! Join the server to get involved in SDK development and ask questions.
- Fix a bug where the response object for httplib (or requests) was held onto for an unnecessarily long amount of time.
- APM: Add spans for more methods on `subprocess.Popen` objects.
- APM: Add spans for Django middlewares.
- APM: Add spans for ASGI requests.
- Automatically inject the ASGI middleware for Django Channels 2.0. This will **break your Channels 2.0 application if it is running on Python 3.5 or 3.6** (while previously it would "only" leak a lot of memory for each ASGI request). **Install `aiocontextvars` from PyPI to make it work again.**

## 0.11.2

- Fix a bug where the SDK would throw an exception on shutdown when running under eventlet.
- Add missing data to Redis breadcrumbs.

## 0.11.1

- Remove a faulty assertion (observed in environment with Django Channels and ASGI).

## 0.11.0

- Fix type hints for the logging integration. Thanks Steven Dignam!
- Fix an issue where scope/context data would leak in applications that use `gevent` with its threading monkeypatch. The fix is to avoid usage of contextvars in such environments. Thanks Ran Benita!
- Fix a reference cycle in the `ThreadingIntegration` that led to exceptions on interpreter shutdown. Thanks Guang Tian Li!
- Fix a series of bugs in the stdlib integration that broke usage of `subprocess`.
- More instrumentation for APM.
- New integration for SQLAlchemy (creates breadcrumbs from queries).
- New (experimental) integration for Apache Beam.
- Fix a bug in the `LoggingIntegration` that would send breadcrumbs timestamps in the wrong timezone.
- The `AiohttpIntegration` now sets the event's transaction name.
- Fix a bug that caused infinite recursion when serializing local variables that logged errors or otherwise created Sentry events.

## 0.10.2

- Fix a bug where a log record with non-strings as `extra` keys would make the SDK crash.
- Added ASGI integration for better hub propagation, request data for your events and capturing uncaught exceptions. Using this middleware explicitly in your code will also fix a few issues with Django Channels.
- Fix a bug where `celery-once` was deadlocking when used in combination with the celery integration.
- Fix a memory leak in the new tracing feature when it is not enabled.

## 0.10.1

- Fix bug where the SDK would yield a deprecation warning about `collections.abc` vs `collections`.
- Fix bug in stdlib integration that would cause spawned subprocesses to not inherit the environment variables from the parent process.

## 0.10.0

- Massive refactor in preparation for tracing. There are no intentional breaking changes, but there is a risk of breakage (hence the minor version bump). Two new client options `traces_sample_rate` and `traceparent_v2` have been added. Do not change the defaults in production, they will bring your application down or at least fill your Sentry project up with nonsense events.

## 0.9.5

- Do not use `getargspec` on Python 3 to evade deprecation warning.

## 0.9.4

- Revert a change in 0.9.3 that prevented passing a `unicode` string as DSN to `init()`.

## 0.9.3

- Add type hints for `init()`.
- Include user agent header when sending events.

## 0.9.2

- Fix a bug in the Django integration that would prevent the user from initializing the SDK at the top of `settings.py`. This bug was introduced in 0.9.1 for all Django versions, but has been there for much longer for Django 1.6 in particular.

## 0.9.1

- Fix a bug on Python 3.7 where gunicorn with gevent would cause the SDK to leak event data between requests.
- Fix a bug where the GNU backtrace integration would not parse certain frames.
- Fix a bug where the SDK would not pick up request bodies for Django Rest Framework based apps.
- Remove a few more headers containing sensitive data per default.
- Various improvements to type hints. Thanks Ran Benita!
- Add an event hint to access the log record from `before_send`.
- Fix a bug that would ignore `__tracebackhide__`. Thanks Matt Millican!
- Fix distribution information for mypy support (add `py.typed` file). Thanks Ran Benita!

## 0.9.0

- The SDK now captures `SystemExit` and other `BaseException`s when coming from within a WSGI app (Flask, Django, ...)
- Pyramid: No longer report an exception if there exists an exception view for it.

## 0.8.1

- Fix infinite recursion bug in Celery integration.

## 0.8.0

- Add the always_run option in excepthook integration.
- Fix performance issues when attaching large data to events. This is not really intended to be a breaking change, but this release does include a rewrite of a larger chunk of code, therefore the minor version bump.

## 0.7.14

- Fix crash when using Celery integration (`TypeError` when using `apply_async`).

## 0.7.13

- Fix a bug where `Ignore` raised in a Celery task would be reported to Sentry.
- Add experimental support for tracing PoC.

## 0.7.12

- Read from `X-Real-IP` for user IP address.
- Fix a bug that would not apply in-app rules for attached callstacks.
- It's now possible to disable automatic proxy support by passing `http_proxy=""`. Thanks Marco Neumann!

## 0.7.11

- Fix a bug that would send `errno` in an invalid format to the server.
- Fix import-time crash when running Python with `-O` flag.
- Fix a bug that would prevent the logging integration from attaching `extra` keys called `data`.
- Fix order in which exception chains are reported to match Raven behavior.
- New integration for the Falcon web framework. Thanks to Jacob Magnusson!

## 0.7.10

- Add more event trimming.
- Log Sentry's response body in debug mode.
- Fix a few bad typehints causing issues in IDEs.
- Fix a bug in the Bottle integration that would report HTTP exceptions (e.g. redirects) as errors.
- Fix a bug that would prevent use of `in_app_exclude` without setting `in_app_include`.
- Fix a bug where request bodies of Django Rest Framework apps were not captured.
- Suppress errors during SQL breadcrumb capturing in Django integration. Also change order in which formatting strategies are tried.

## 0.7.9

- New integration for the Bottle web framework. Thanks to Stepan Henek!
- Self-protect against broken mapping implementations and other broken reprs instead of dropping all local vars from a stacktrace. Thanks to Marco Neumann!

## 0.7.8

- Add support for Sanic versions 18 and 19.
- Fix a bug that causes an SDK crash when using composed SQL from psycopg2.

## 0.7.7

- Fix a bug that would not capture request bodies if they were empty JSON arrays, objects or strings.
- New GNU backtrace integration parses stacktraces from exception messages and appends them to existing stacktrace.
- Capture Tornado formdata.
- Support Python 3.6 in Sanic and AIOHTTP integration.
- Clear breadcrumbs before starting a new request.
- Fix a bug in the Celery integration that would drop pending events during worker shutdown (particularly an issue when running with `max_tasks_per_child = 1`)
- Fix a bug with `repr`ing locals whose `__repr__` simultaneously changes the WSGI environment or other data that we're also trying to serialize at the same time.

## 0.7.6

- Fix a bug where artificial frames for Django templates would not be marked as in-app and would always appear as the innermost frame. Implement a heuristic to show template frame closer to `render` or `parse` invocation.

## 0.7.5

- Fix a bug in the Tornado integration that would send broken cookies to the server.
- Fix a bug in the logging integration that would ignore the client option `with_locals`.

## 0.7.4

- Read release and environment from process environment like the Raven SDK does. The keys are called `SENTRY_RELEASE` and `SENTRY_ENVIRONMENT`.
- Fix a bug in the `serverless` integration where it would not push a new scope for each function call (leaking tags and other things across calls).
- Experimental support for type hints.

## 0.7.3

- Fix crash in AIOHTTP integration when integration was set up but disabled.
- Flask integration now adds usernames, email addresses based on the protocol Flask-User defines on top of Flask-Login.
- New threading integration catches exceptions from crashing threads.
- New method `flush` on hubs and clients. New global `flush` function.
- Add decorator for serverless functions to fix common problems in those environments.
- Fix a bug in the logging integration where using explicit handlers required enabling the integration.

## 0.7.2

- Fix `celery.exceptions.Retry` spamming in Celery integration.

## 0.7.1

- Fix `UnboundLocalError` crash in Celery integration.

## 0.7.0

- Properly display chained exceptions (PEP-3134).
- Rewrite celery integration to monkeypatch instead of using signals due to bugs in Celery 3's signal handling. The Celery scope is also now available in prerun and postrun signals.
- Fix Tornado integration to work with Tornado 6.
- Do not evaluate Django `QuerySet` when trying to capture local variables. Also an internal hook was added to overwrite `repr` for local vars.

## 0.6.9

- Second attempt at fixing the bug that was supposed to be fixed in 0.6.8.

  > No longer access arbitrary sequences in local vars due to possible side effects.

## 0.6.8

- No longer access arbitrary sequences in local vars due to possible side effects.

## 0.6.7

- Source code of Django templates is now displayed in stackframes like Jinja templates in Flask already were.
- Updates to AWS Lambda integration for changes Amazon did to their Python 3.7 runtime.
- Fix a bug in the AIOHTTP integration that would report 300s and other HTTP status codes as errors.
- Fix a bug where a crashing `before_send` would crash the SDK and app.
- Fix a bug where cyclic references in e.g. local variables or `extra` data would crash the SDK.

## 0.6.6

- Un-break API of internal `Auth` object that we use in Sentry itself.

## 0.6.5

- Capture WSGI request data eagerly to save memory and avoid issues with uWSGI.
- Ability to use subpaths in DSN.
- Ignore `django.request` logger.

## 0.6.4

- Fix bug that would lead to an `AssertionError: stack must have at least one layer`, at least in testsuites for Flask apps.

## 0.6.3

- New integration for Tornado
- Fix request data in Django, Flask and other WSGI frameworks leaking between events.
- Fix infinite recursion when sending more events in `before_send`.

## 0.6.2

- Fix crash in AWS Lambda integration when using Zappa. This only silences the error, the underlying bug is still in Zappa.

## 0.6.1

- New integration for aiohttp-server.
- Fix crash when reading hostname in broken WSGI environments.

## 0.6.0

- Fix bug where a 429 without Retry-After would not be honored.
- Fix bug where proxy setting would not fall back to `http_proxy` for HTTPs traffic.
- A WSGI middleware is now available for catching errors and adding context about the current request to them.
- Using `logging.debug("test", exc_info=True)` will now attach the current stacktrace if no `sys.exc_info` is available.
- The Python 3.7 runtime for AWS Lambda is now supported.
- Fix a bug that would drop an event or parts of it when it contained bytes that were not UTF-8 encoded.
- Logging an exception will no longer add the exception as breadcrumb to the exception's own event.

## 0.5.5

- New client option `ca_certs`.
- Fix crash with Django and psycopg2.

## 0.5.4

- Fix deprecation warning in relation to the `collections` stdlib module.
- Fix bug that would crash Django and Flask when streaming responses are failing halfway through.

## 0.5.3

- Fix bug where using `push_scope` with a callback would not pop the scope.
- Fix crash when initializing the SDK in `push_scope`.
- Fix bug where IP addresses were sent when `send_default_pii=False`.

## 0.5.2

- Fix bug where events sent through the RQ integration were sometimes lost.
- Remove a deprecation warning about usage of `logger.warn`.
- Fix bug where large frame local variables would lead to the event being rejected by Sentry.

## 0.5.1

- Integration for Redis Queue (RQ)

## 0.5.0

- Fix a bug that would omit several debug logs during SDK initialization.
- Fix issue that sent an event key `""` Sentry wouldn't understand.
- **Breaking change:** The `level` and `event_level` options in the logging integration now work separately from each other.
- Fix a bug in the Sanic integration that would report the exception behind any HTTP error code.
- Fix a bug that would spam breadcrumbs in the Celery integration. Ignore logger `celery.worker.job`.
- Additional attributes on log records are now put into `extra`.
- Integration for Pyramid.
- `sys.argv` is put into extra automatically.

## 0.4.3

- Fix a bug that would leak WSGI responses.

## 0.4.2

- Fix a bug in the Sanic integration that would leak data between requests.
- Fix a bug that would hide all debug logging happening inside of the built-in transport.
- Fix a bug that would report errors for typos in Django's shell.

## 0.4.1

- Fix bug that would only show filenames in stacktraces but not the parent directories.

## 0.4.0

- Changed how integrations are initialized. Integrations are now configured and enabled per-client.

## 0.3.11

- Fix issue with certain deployment tools and the AWS Lambda integration.

## 0.3.10

- Set transactions for Django like in Raven. Which transaction behavior is used can be configured.
- Fix a bug which would omit frame local variables from stacktraces in Celery.
- New option: `attach_stacktrace`

## 0.3.9

- Bugfixes for AWS Lambda integration: Using Zappa did not catch any exceptions.

## 0.3.8

- Nicer log level for internal errors.

## 0.3.7

- Remove `repos` configuration option. There was never a way to make use of this feature.
- Fix a bug in `last_event_id`.
- Add Django SQL queries to breadcrumbs.
- Django integration won't set user attributes if they were already set.
- Report correct SDK version to Sentry.

## 0.3.6

- Integration for Sanic

## 0.3.5

- Integration for AWS Lambda
- Fix mojibake when encoding local variable values

## 0.3.4

- Performance improvement when storing breadcrumbs

## 0.3.3

- Fix crash when breadcrumbs had to be truncated

## 0.3.2

- Fixed an issue where some paths were not properly sent as absolute paths

sentry-python-2.18.0/CONTRIBUTING.md

# Contributing to Sentry SDK for Python

We welcome contributions to `sentry-python` by the community.

This file outlines the process to contribute to the SDK itself. For contributing to the documentation, please see the [Contributing to Docs](https://docs.sentry.io/contributing/) page.
## How to Report a Problem

Please search the [issue tracker](https://github.com/getsentry/sentry-python/issues) before creating a new issue (a problem or an improvement request). Please also ask in our [Sentry Community on Discord](https://discord.com/invite/Ww9hbqr) before submitting a new issue. There are a ton of great people in our Discord community ready to help you!

## Submitting Changes

- Fork the `sentry-python` repo and prepare your changes.
- Add tests for your changes to `tests/`.
- Run tests and make sure all of them pass.
- Submit a pull request, referencing any issues your changes address. Please follow our [commit message format](https://develop.sentry.dev/commit-messages/#commit-message-format) when naming your pull request.

We will review your pull request as soon as possible. Thank you for contributing!

## Development Environment

### Set up Python

Make sure that you have Python 3 installed. Version 3.7 or higher is required to run style checkers on pre-commit.

On macOS, we recommend using `brew` to install Python. For Windows, we recommend an official [python.org](https://www.python.org/downloads/) release.

### Fork and Clone the Repo

Before you can contribute, you will need to [fork the `sentry-python` repository](https://github.com/getsentry/sentry-python/fork). Then, clone the forked repository to your local development environment.

### Create a Virtual Environment

To keep your Python development environment and packages separate from the ones used by your operating system, create a [virtual environment](https://docs.python.org/3/tutorial/venv.html):

```bash
cd sentry-python

python -m venv .venv
```

Then, activate your virtual environment with the following command. You will need to repeat this step every time you wish to work on your changes for `sentry-python`.

```bash
source .venv/bin/activate
```

### Install `sentry-python` in Editable Mode

Install `sentry-python` in [editable mode](https://pip.pypa.io/en/latest/topics/local-project-installs/#editable-installs). This will make any changes you make to the SDK code locally immediately effective without you having to reinstall or copy anything.

```bash
pip install -e .
```

**Hint:** Sometimes you need a sample project to run your new changes to `sentry-python`. In this case install the sample project in the same virtualenv and you should be good to go.

### Install Coding Style Pre-commit Hooks

This will make sure that your commits will have the correct coding style.

```bash
cd sentry-python

pip install -r requirements-devenv.txt

pip install pre-commit

pre-commit install
```

That's it. You should be ready to make changes, run tests, and make commits! If you experience any problems, please don't hesitate to ping us in our [Discord Community](https://discord.com/invite/Ww9hbqr).

## Running Tests

You can run all tests with the following command:

```bash
pytest tests/
```

If you would like to run the tests for a specific integration, use a command similar to the one below:

```bash
pytest -rs tests/integrations/flask/  # Replace "flask" with the specific integration you wish to test
```

**Hint:** Tests of integrations need additional dependencies. The switch `-rs` will show you why tests were skipped and what dependencies you need to install for the tests to run. (You can also consult the [tox.ini](tox.ini) file to see what dependencies are installed for each integration)

## Adding a New Integration

1. Write the integration.

   - Instrument all application instances by default.
     Prefer global signals/patches instead of configuring a specific instance. Don't make the user pass anything to your integration for anything to work. Aim for zero configuration.

   - Everybody monkeypatches. That means:

     - Make sure to think about conflicts with other monkeypatches when monkeypatching.
     - You don't need to feel bad about it.

   - Make sure your changes don't break end user contracts. The SDK should never alter the expected behavior of the underlying library or framework from the user's perspective and it shouldn't have any side effects.

   - Avoid modifying the hub, registering a new client or the like. The user drives the client, and the client owns integrations.

   - Allow the user to turn off the integration by changing the client. Check `Hub.current.get_integration(MyIntegration)` from within your signal handlers to see if your integration is still active before you do anything impactful (such as sending an event). A schematic sketch of this pattern follows after this list.

2. Write tests.

   - Consider the minimum versions supported, and test each version in a separate env in `tox.ini`.
   - Create a new folder in `tests/integrations/`, with an `__init__` file that skips the entire suite if the package is not installed.

3. Update package metadata.

   - We use `extras_require` in `setup.py` to communicate minimum version requirements for integrations. People can use this in combination with tools like Poetry or Pipenv to detect conflicts between our supported versions and their used versions programmatically.

     Do not set upper bounds on version requirements as people are often faster in adopting new versions of a web framework than we are in adding them to the test matrix or our package metadata.

4. Write the [docs](https://github.com/getsentry/sentry-docs). Follow the structure of [existing integration docs](https://docs.sentry.io/platforms/python/integrations/). And, please **make sure to add your integration to the table in `python/integrations/index.md`** (people often forget this step 🙂).

5. Merge docs after the new version has been released. The docs are built and deployed after each merge, so your changes should go live in a few minutes.

6. (optional, if possible) Update data in [`sdk_updates.py`](https://github.com/getsentry/sentry/blob/master/src/sentry/sdk_updates.py) to give users in-app suggestions to use your integration. This step will only apply to some integrations.
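To make the guidelines in step 1 concrete, here is a schematic skeleton of an integration. It is a sketch, not runnable as-is: `somelib.do_work` stands in for whatever third-party callable you are instrumenting.

```python
import sentry_sdk
from sentry_sdk.hub import Hub
from sentry_sdk.integrations import Integration

import somelib  # hypothetical library being instrumented


class MyIntegration(Integration):
    identifier = "my_integration"

    @staticmethod
    def setup_once():
        # Patch globally, exactly once, for all instances of the library.
        original_do_work = somelib.do_work

        def sentry_patched_do_work(*args, **kwargs):
            # Respect the client configuration: do nothing impactful
            # if the integration is not active on the current client.
            if Hub.current.get_integration(MyIntegration) is None:
                return original_do_work(*args, **kwargs)
            try:
                return original_do_work(*args, **kwargs)
            except Exception:
                sentry_sdk.capture_exception()
                raise

        somelib.do_work = sentry_patched_do_work
```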
## Releasing a New Version

_(only relevant for Sentry employees)_

### Prerequisites

- All the changes that should be released must be on the `master` branch.
- Every commit should follow the [Commit Message Format](https://develop.sentry.dev/commit-messages/#commit-message-format) convention.
- CHANGELOG.md is updated automatically. No human intervention is necessary, but you might want to consider polishing the changelog by hand to make it more user friendly by grouping related things together, adding small code snippets and links to docs, etc.

### Manual Process

- On GitHub in the `sentry-python` repository, go to "Actions" and select the "Release" workflow.
- Click on "Run workflow" on the right side, and make sure the `master` branch is selected.
- Set the "Version to release" input field. Here you decide if it is a major, minor or patch release. (See "Versioning Policy" below)
- Click "Run Workflow".

This will trigger [Craft](https://github.com/getsentry/craft) to prepare everything needed for a release. (For more information, see [craft prepare](https://github.com/getsentry/craft#craft-prepare-preparing-a-new-release).)

At the end of this process a release issue is created in the [Publish](https://github.com/getsentry/publish) repository. (Example release issue: https://github.com/getsentry/publish/issues/815)

Now one of the persons with release privileges (most probably your engineering manager) will review this issue and then add the `accepted` label to the issue.

There are always two persons involved in a release.

If you are in a hurry and the release should be out immediately, there is a Slack channel called `#proj-release-approval` where you can see your release issue and where you can ping people to please have a look immediately.

When the release issue is labeled `accepted`, [Craft](https://github.com/getsentry/craft) is triggered again to publish the release to all the right platforms. (See [craft publish](https://github.com/getsentry/craft#craft-publish-publishing-the-release) for more information.) At the end of this process the release issue on GitHub will be closed and the release is completed! Congratulations!

There is a sequence diagram visualizing all this in the [README.md](https://github.com/getsentry/publish) of the `Publish` repository.

### Versioning Policy

This project follows [semver](https://semver.org/), with three additions:

- Semver says that major version `0` can include breaking changes at any time. Still, it is common practice to assume that only `0.x` releases (minor versions) can contain breaking changes while `0.x.y` releases (patch versions) are used for backwards-compatible changes (bugfixes and features). This project also follows that practice.
- All undocumented APIs are considered internal. They are not part of this contract.
- Certain features (e.g. integrations) may be explicitly called out as "experimental" or "unstable" in the documentation. They come with their own versioning policy described in the documentation.

We recommend pinning your version requirements against `2.x.*` or `2.x.y`. Either one of the following is fine:

```
sentry-sdk>=2.0.0,<3.0.0
sentry-sdk==2.4.0
```

A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant.

However, feel free to ask for backports of specific commits on the bugtracker.

## Contributing to Sentry AWS Lambda Layer

### Development environment

You need to have an AWS account and the AWS CLI installed and set up.

We put together two helper scripts that can help you with development:

- `./scripts/aws-deploy-local-layer.sh`

  This script [scripts/aws-deploy-local-layer.sh](scripts/aws-deploy-local-layer.sh) will take the code you have checked out locally, create a Lambda layer out of it and deploy it to the `eu-central-1` region of your configured AWS account using `aws` CLI.

  The Lambda layer will have the name `SentryPythonServerlessSDK-local-dev`

- `./scripts/aws-attach-layer-to-lambda-function.sh`

  You can use this script [scripts/aws-attach-layer-to-lambda-function.sh](scripts/aws-attach-layer-to-lambda-function.sh) to attach the Lambda layer you just deployed (using the first script) onto one of your existing Lambda functions. You will have to give the name of the Lambda function to attach onto as an argument. (See the script for details.)

With these two helper scripts it should be easy to rapidly iterate your development on the Lambda layer.

sentry-python-2.18.0/LICENSE

MIT License

Copyright (c) 2018-2024 Functional Software, Inc.
dba Sentry

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

sentry-python-2.18.0/MANIFEST.in

include LICENSE
include sentry_sdk/py.typed

sentry-python-2.18.0/MIGRATION_GUIDE.md

# Sentry SDK 2.0 Migration Guide

Looking to upgrade from Sentry SDK 1.x to 2.x? Here's a comprehensive list of what's changed. Looking for a more digestible summary? See the [guide in the docs](https://docs.sentry.io/platforms/python/migration/1.x-to-2.x) with the most common migration patterns.

## New Features

- Additional integrations will now be activated automatically if the SDK detects the respective package is installed: Ariadne, ARQ, asyncpg, Chalice, clickhouse-driver, GQL, Graphene, huey, Loguru, PyMongo, Quart, Starlite, Strawberry.
- While refactoring the [inner workings](https://docs.sentry.io/platforms/python/enriching-events/scopes/) of the SDK we added new top-level APIs for custom instrumentation called `new_scope` and `isolation_scope`. See the [Deprecated](#deprecated) section to see how they map to the existing APIs.

## Changed

- The Pyramid integration will not capture errors that might happen in `authenticated_userid()` in a custom `AuthenticationPolicy` class.
- The method `need_code_loation` of the `MetricsAggregator` was renamed to `need_code_location`.
- The `BackgroundWorker` thread used to process events was renamed from `raven-sentry.BackgroundWorker` to `sentry-sdk.BackgroundWorker`.
- The `reraise` function was moved from `sentry_sdk._compat` to `sentry_sdk.utils`.
- The `_ScopeManager` was moved from `sentry_sdk.hub` to `sentry_sdk.scope`.
- The signature for the metrics callback function set with `before_emit_metric` has changed from `before_emit_metric(key, tags)` to `before_emit_metric(key, value, unit, tags)`
- Moved the contents of `tracing_utils_py3.py` to `tracing_utils.py`. The `start_child_span_decorator` is now in `sentry_sdk.tracing_utils`.
- The actual implementation of `get_current_span` was moved to `sentry_sdk.tracing_utils`. `sentry_sdk.get_current_span` is still accessible as part of the top-level API.
- `sentry_sdk.tracing_utils.add_query_source()`: Removed the `hub` parameter. It is not necessary anymore.
- `sentry_sdk.tracing_utils.record_sql_queries()`: Removed the `hub` parameter. It is not necessary anymore.
- `sentry_sdk.tracing_utils.get_current_span()` now takes a `scope` instead of a `hub` as parameter.
- `sentry_sdk.tracing_utils.should_propagate_trace()` now takes a `Client` instead of a `Hub` as first parameter.
- `sentry_sdk.utils.is_sentry_url()` now takes a `Client` instead of a `Hub` as first parameter.
- `sentry_sdk.utils._get_contextvars` does not return a tuple with three values, but a tuple with two values. The `copy_context` was removed.
- You no longer have to use `configure_scope` to mutate a transaction. Instead, you simply get the current scope to mutate the transaction. Here is a recipe on how to change your code to make it work:

  Your existing implementation:

  ```python
  transaction = sentry_sdk.transaction(...)

  # later in the code execution:

  with sentry_sdk.configure_scope() as scope:
      scope.set_transaction_name("new-transaction-name")
  ```

  needs to be changed to this:

  ```python
  transaction = sentry_sdk.transaction(...)

  # later in the code execution:

  scope = sentry_sdk.get_current_scope()
  scope.set_transaction_name("new-transaction-name")
  ```

- The classes listed in the table below are now abstract base classes. Therefore, they can no longer be instantiated. Subclasses can only be instantiated if they implement all of the abstract methods (see the sketch after the table).
<details>
  <summary>Show table</summary>

| Class                                 | Abstract methods                       |
| ------------------------------------- | -------------------------------------- |
| `sentry_sdk.integrations.Integration` | `setup_once`                           |
| `sentry_sdk.metrics.Metric`           | `add`, `serialize_value`, and `weight` |
| `sentry_sdk.profiler.Scheduler`       | `setup` and `teardown`                 |
| `sentry_sdk.transport.Transport`      | `capture_envelope`                     |

</details>
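For example, a custom transport now has to implement `capture_envelope` before it can be instantiated. A minimal sketch (class name and DSN are placeholders):

```python
import sentry_sdk
from sentry_sdk.transport import Transport


class PrintingTransport(Transport):
    # Without this method, PrintingTransport() would raise a TypeError,
    # because Transport is now an abstract base class.
    def capture_envelope(self, envelope):
        print(envelope)


sentry_sdk.init(dsn="...", transport=PrintingTransport())
```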
## Removed

- Removed support for Python 2 and Python 3.5. The SDK now requires at least Python 3.6.
- Removed support for Celery 3.\*.
- Removed support for Django 1.8, 1.9, 1.10.
- Removed support for Flask 0.\*.
- Removed support for gRPC < 1.39.
- Removed support for Tornado < 6.
- Removed support for sending events to the `/store` endpoint. Everything is now sent to the `/envelope` endpoint. If you're on SaaS you don't have to worry about this, but if you're running Sentry yourself you'll need version `20.6.0` or higher of self-hosted Sentry.
- The deprecated `with_locals` configuration option was removed. Use `include_local_variables` instead. See https://docs.sentry.io/platforms/python/configuration/options/#include-local-variables.
- The deprecated `request_bodies` configuration option was removed. Use `max_request_body_size`. See https://docs.sentry.io/platforms/python/configuration/options/#max-request-body-size.
- Removed support for `user.segment`. It was also removed from the trace header as well as from the dynamic sampling context.
- Removed support for the `install` method for custom integrations. Please use `setup_once` instead.
- Removed `sentry_sdk.tracing.Span.new_span`. Use `sentry_sdk.tracing.Span.start_child` instead.
- Removed `sentry_sdk.tracing.Transaction.new_span`. Use `sentry_sdk.tracing.Transaction.start_child` instead.
- Removed support for creating transactions via `sentry_sdk.tracing.Span(transaction=...)`. To create a transaction, please use `sentry_sdk.tracing.Transaction(name=...)`.
- Removed `sentry_sdk.utils.Auth.store_api_url`.
- `sentry_sdk.utils.Auth.get_api_url`'s now accepts a `sentry_sdk.consts.EndpointType` enum instead of a string as its only parameter. We recommend omitting this argument when calling the function, since the parameter's default value is the only possible `sentry_sdk.consts.EndpointType` value. The parameter exists for future compatibility.
- Removed `tracing_utils_py2.py`. The `start_child_span_decorator` is now in `sentry_sdk.tracing_utils`.
- Removed the `sentry_sdk.profiler.Scheduler.stop_profiling` method. Any calls to this method can simply be removed, since this was a no-op method.
- Removed the experimental `metrics_summary_sample_rate` config option.
- Removed the experimental `should_summarize_metric` config option.

## Deprecated

- Using the `Hub` directly as well as using hub-based APIs has been deprecated. Where available, use [the top-level API instead](sentry_sdk/api.py); otherwise use the [scope API](sentry_sdk/scope.py) or the [client API](sentry_sdk/client.py).

  Before:

  ```python
  with hub.start_span(...):
      # do something
  ```

  After:

  ```python
  import sentry_sdk

  with sentry_sdk.start_span(...):
      # do something
  ```

- Hub cloning is deprecated.

  Before:

  ```python
  with Hub(Hub.current) as hub:
      # do something with the cloned hub
  ```

  After:

  ```python
  import sentry_sdk

  with sentry_sdk.isolation_scope() as scope:
      # do something with the forked scope
  ```

- `configure_scope` is deprecated. Modify the current or isolation scope directly instead.

  Before:

  ```python
  with configure_scope() as scope:
      # do something with `scope`
  ```

  After:

  ```python
  from sentry_sdk import get_current_scope

  scope = get_current_scope()
  # do something with `scope`
  ```

  Or:

  ```python
  from sentry_sdk import get_isolation_scope

  scope = get_isolation_scope()
  # do something with `scope`
  ```

  When to use `get_current_scope()` and `get_isolation_scope()` depends on how long the change to the scope should be in effect.
  If you want the changed scope to affect the whole request-response cycle or the whole execution of a task, use the isolation scope. If it's more localized, use the current scope.

- `push_scope` is deprecated. Fork the current or the isolation scope instead.

  Before:

  ```python
  with push_scope() as scope:
      # do something with `scope`
  ```

  After:

  ```python
  import sentry_sdk

  with sentry_sdk.new_scope() as scope:
      # do something with `scope`
  ```

  Or:

  ```python
  import sentry_sdk

  with sentry_sdk.isolation_scope() as scope:
      # do something with `scope`
  ```

  `new_scope()` will fork the current scope, while `isolation_scope()` will fork the isolation scope. The lifecycle of a single isolation scope roughly translates to the lifecycle of a transaction in most cases, so if you're looking to create a new separate scope for a whole request-response cycle or task execution, go for `isolation_scope()`. If you want to wrap a smaller unit of code, fork the current scope instead with `new_scope()`.

- Accessing the client via the hub has been deprecated. Use the top-level `sentry_sdk.get_client()` to get the current client.

- `profiler_mode` and `profiles_sample_rate` have been deprecated as `_experiments` options. Use them as top-level options instead:

  ```python
  sentry_sdk.init(
      ...,
      profiler_mode="thread",
      profiles_sample_rate=1.0,
  )
  ```

- Deprecated `sentry_sdk.transport.Transport.capture_event`. Please use `sentry_sdk.transport.Transport.capture_envelope` instead.

- Passing a function to `sentry_sdk.init`'s `transport` keyword argument has been deprecated. If you wish to provide a custom transport, please pass a `sentry_sdk.transport.Transport` instance or a subclass.

- The parameter `propagate_hub` in `ThreadingIntegration()` was deprecated and renamed to `propagate_scope`.

sentry-python-2.18.0/Makefile000066400000000000000000000016721471214654000161170ustar00rootroot00000000000000SHELL = /bin/bash

VENV_PATH = .venv

help:
	@echo "Thanks for your interest in the Sentry Python SDK!"
	@echo
	@echo "make apidocs: Build the API documentation"
	@echo "make aws-lambda-layer: Build AWS Lambda layer directory for serverless integration"
	@echo
	@echo "Also make sure to read ./CONTRIBUTING.md"
	@echo
	@false

.venv:
	python -m venv $(VENV_PATH)
	$(VENV_PATH)/bin/pip install tox

dist: .venv
	rm -rf dist dist-serverless build
	$(VENV_PATH)/bin/pip install wheel setuptools
	$(VENV_PATH)/bin/python setup.py sdist bdist_wheel
.PHONY: dist

apidocs: .venv
	@$(VENV_PATH)/bin/pip install --editable .
	@$(VENV_PATH)/bin/pip install -U -r ./requirements-docs.txt
	rm -rf docs/_build
	@$(VENV_PATH)/bin/sphinx-build -vv -W -b html docs/ docs/_build
.PHONY: apidocs

aws-lambda-layer: dist
	$(VENV_PATH)/bin/pip install -r requirements-aws-lambda-layer.txt
	$(VENV_PATH)/bin/python -m scripts.build_aws_lambda_layer
.PHONY: aws-lambda-layer

sentry-python-2.18.0/README.md000066400000000000000000000123111471214654000157260ustar00rootroot00000000000000Sentry for Python

_Bad software is everywhere, and we're tired of it. Sentry is on a mission to help developers write better software faster, so we can get back to enjoying technology. If you want to join us, [**check out our open positions**](https://sentry.io/careers/)_.
# Official Sentry SDK for Python

[![Build Status](https://github.com/getsentry/sentry-python/actions/workflows/ci.yml/badge.svg)](https://github.com/getsentry/sentry-python/actions/workflows/ci.yml)
[![PyPi page link -- version](https://img.shields.io/pypi/v/sentry-sdk.svg)](https://pypi.python.org/pypi/sentry-sdk)
[![Discord](https://img.shields.io/discord/621778831602221064)](https://discord.gg/cWnMQeA)

Welcome to the official Python SDK for **[Sentry](http://sentry.io/)**!

## Getting Started

### Installation

Getting Sentry into your project is straightforward. Just run this command in your terminal:

```bash
pip install --upgrade sentry-sdk
```

### Basic Configuration

Here’s a quick configuration example to get Sentry up and running:

```python
import sentry_sdk

sentry_sdk.init(
    "https://12927b5f211046b575ee51fd8b1ac34f@o1.ingest.sentry.io/1",  # Your DSN here

    # Set traces_sample_rate to 1.0 to capture 100%
    # of transactions for performance monitoring.
    traces_sample_rate=1.0,
)
```

With this configuration, Sentry will monitor for exceptions and performance issues.

### Quick Usage Example

To generate some events that will show up in Sentry, you can log messages or capture errors:

```python
from sentry_sdk import capture_message

capture_message("Hello Sentry!")  # You'll see this in your Sentry dashboard.

raise ValueError("Oops, something went wrong!")  # This will create an error event in Sentry.
```

#### Explore the Docs

For more details on advanced usage, integrations, and customization, check out the full documentation:

- [Official SDK Docs](https://docs.sentry.io/platforms/python/)
- [API Reference](https://getsentry.github.io/sentry-python/)

## Integrations

Sentry integrates with many popular Python libraries and frameworks, including:

- [Django](https://docs.sentry.io/platforms/python/integrations/django/)
- [Flask](https://docs.sentry.io/platforms/python/integrations/flask/)
- [FastAPI](https://docs.sentry.io/platforms/python/integrations/fastapi/)
- [Celery](https://docs.sentry.io/platforms/python/integrations/celery/)
- [AWS Lambda](https://docs.sentry.io/platforms/python/integrations/aws-lambda/)

Want more? [Check out the full list of integrations](https://docs.sentry.io/platforms/python/integrations/).

### Rolling Your Own Integration?

If you want to create a new integration or improve an existing one, we’d welcome your contributions! Please read our [contributing guide](https://github.com/getsentry/sentry-python/blob/master/CONTRIBUTING.md) before starting.

## Migrating Between Versions?

### From `1.x` to `2.x`

If you're using the older `1.x` version of the SDK, now's the time to upgrade to `2.x`. It includes significant upgrades and new features. Check our [migration guide](https://docs.sentry.io/platforms/python/migration/1.x-to-2.x) for assistance.

### From `raven-python`

Using the legacy `raven-python` client? It's now in maintenance mode, and we recommend migrating to the new SDK for an improved experience. Get all the details in our [migration guide](https://docs.sentry.io/platforms/python/migration/raven-to-sentry-sdk/).

## Want to Contribute?

We’d love your help in improving the Sentry SDK! Whether it’s fixing bugs, adding features, or enhancing documentation, every contribution is valuable. For details on how to contribute, please check out [CONTRIBUTING.md](CONTRIBUTING.md) and explore the [open issues](https://github.com/getsentry/sentry-python/issues).

## Need Help?
If you encounter issues or need help setting up or configuring the SDK, don’t hesitate to reach out to the [Sentry Community on Discord](https://discord.com/invite/Ww9hbqr). There are a ton of great people there ready to help!

## Resources

Here are additional resources to help you make the most of Sentry:

- [![Documentation](https://img.shields.io/badge/documentation-sentry.io-green.svg)](https://docs.sentry.io/quickstart/) – Official documentation to get started.
- [![Discord](https://img.shields.io/discord/621778831602221064)](https://discord.gg/Ww9hbqr) – Join our Discord community.
- [![Twitter Follow](https://img.shields.io/twitter/follow/getsentry?label=getsentry&style=social)](https://twitter.com/intent/follow?screen_name=getsentry) – Follow us on X (Twitter) for updates.
- [![Stack Overflow](https://img.shields.io/badge/stack%20overflow-sentry-green.svg)](http://stackoverflow.com/questions/tagged/sentry) – Questions and answers related to Sentry.

## License

The SDK is open-source and available under the MIT license. Check out the [LICENSE](LICENSE) file for more information.

---

Thanks to everyone who has helped improve the SDK!

sentry-python-2.18.0/checkouts/000077500000000000000000000000001471214654000164415ustar00rootroot00000000000000sentry-python-2.18.0/checkouts/data-schemas/000077500000000000000000000000001471214654000207735ustar00rootroot00000000000000sentry-python-2.18.0/codecov.yml000066400000000000000000000015311471214654000166160ustar00rootroot00000000000000coverage:
  status:
    project:
      default:
        target: auto  # auto compares coverage to the previous base commit
        threshold: 10%  # this allows a 10% drop from the previous base commit coverage
        informational: true

ignore:
  - "tests"
  - "sentry_sdk/_types.py"

# Read more here: https://docs.codecov.com/docs/pull-request-comments
comment:
  after_n_builds: 99
  layout: 'diff, files'
  # Update, if comment exists. Otherwise post new.
  behavior: default
  # Comments will only post when coverage changes. Furthermore, if a comment
  # already exists, and a newer commit results in no coverage change for the
  # entire pull, the comment will be deleted.
  require_changes: true
  require_base: true  # must have a base report to post
  require_head: true  # must have a head report to post

github_checks:
  annotations: false
sentry-python-2.18.0/docs/000077500000000000000000000000001471214654000154015ustar00rootroot00000000000000sentry-python-2.18.0/docs/.gitignore000066400000000000000000000000071471214654000173660ustar00rootroot00000000000000_build
sentry-python-2.18.0/docs/_static/000077500000000000000000000000001471214654000170275ustar00rootroot00000000000000sentry-python-2.18.0/docs/_static/.gitkeep000066400000000000000000000000001471214654000204460ustar00rootroot00000000000000sentry-python-2.18.0/docs/api.rst000066400000000000000000000025561471214654000167120ustar00rootroot00000000000000=============
Top Level API
=============

This is the user facing API of the SDK. It's exposed as ``sentry_sdk``.
With this API you can implement a custom performance monitoring or error reporting solution.

Capturing Data
==============

.. autofunction:: sentry_sdk.api.capture_event
.. autofunction:: sentry_sdk.api.capture_exception
.. autofunction:: sentry_sdk.api.capture_message

Enriching Events
================

.. autofunction:: sentry_sdk.api.add_breadcrumb
.. autofunction:: sentry_sdk.api.set_context
.. autofunction:: sentry_sdk.api.set_extra
.. autofunction:: sentry_sdk.api.set_level
.. autofunction:: sentry_sdk.api.set_tag
..
autofunction:: sentry_sdk.api.set_user Performance Monitoring ====================== .. autofunction:: sentry_sdk.api.continue_trace .. autofunction:: sentry_sdk.api.get_current_span .. autofunction:: sentry_sdk.api.start_span .. autofunction:: sentry_sdk.api.start_transaction Distributed Tracing =================== .. autofunction:: sentry_sdk.api.get_baggage .. autofunction:: sentry_sdk.api.get_traceparent Client Management ================= .. autofunction:: sentry_sdk.api.is_initialized .. autofunction:: sentry_sdk.api.get_client Managing Scope (advanced) ========================= .. autofunction:: sentry_sdk.api.configure_scope .. autofunction:: sentry_sdk.api.push_scope .. autofunction:: sentry_sdk.api.new_scope sentry-python-2.18.0/docs/apidocs.rst000066400000000000000000000017441471214654000175630ustar00rootroot00000000000000======== API Docs ======== .. autoclass:: sentry_sdk.Hub :members: .. autoclass:: sentry_sdk.Scope :members: .. autoclass:: sentry_sdk.Client :members: .. autoclass:: sentry_sdk.client.BaseClient :members: .. autoclass:: sentry_sdk.client.NonRecordingClient :members: .. autoclass:: sentry_sdk.client._Client :members: .. autoclass:: sentry_sdk.Transport :members: .. autoclass:: sentry_sdk.HttpTransport :members: .. autoclass:: sentry_sdk.tracing.Transaction :members: .. autoclass:: sentry_sdk.tracing.Span :members: .. autoclass:: sentry_sdk.profiler.transaction_profiler.Profile :members: .. autoclass:: sentry_sdk.session.Session :members: .. autoclass:: sentry_sdk.attachments.Attachment :members: .. autoclass:: sentry_sdk.scrubber.EventScrubber :members: .. autoclass:: sentry_sdk.monitor.Monitor :members: .. autoclass:: sentry_sdk.envelope.Envelope :members: .. autoclass:: sentry_sdk.envelope.Item :members: sentry-python-2.18.0/docs/conf.py000066400000000000000000000131471471214654000167060ustar00rootroot00000000000000import os import sys import typing from datetime import datetime # prevent circular imports import sphinx.builders.html import sphinx.builders.latex import sphinx.builders.texinfo import sphinx.builders.text import sphinx.domains.c # noqa: F401 import sphinx.domains.cpp # noqa: F401 import sphinx.ext.autodoc # noqa: F401 import sphinx.ext.intersphinx # noqa: F401 import urllib3.exceptions # noqa: F401 typing.TYPE_CHECKING = True # # Configuration file for the Sphinx documentation builder. # # This file does only contain a selection of the most common options. For a # full list see the documentation: # http://www.sphinx-doc.org/en/master/config sys.path.insert(0, os.path.abspath("..")) # -- Project information ----------------------------------------------------- project = "sentry-python" copyright = "2019-{}, Sentry Team and Contributors".format(datetime.now().year) author = "Sentry Team and Contributors" release = "2.18.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. # -- General configuration --------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ "sphinx.ext.autodoc", "sphinx_autodoc_typehints", "sphinx.ext.viewcode", "sphinx.ext.githubpages", "sphinx.ext.intersphinx", ] # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # The suffix(es) of source filenames. 
# You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] source_suffix = ".rst" # The master toctree document. master_doc = "index" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] # The name of the Pygments (syntax highlighting) style to use. pygments_style = None # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # on_rtd = os.environ.get("READTHEDOCS", None) == "True" html_theme = "shibuya" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = { "github_url": "https://github.com/getsentry/sentry-python", } # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ["_static"] # Custom sidebar templates, must be a dictionary that maps document names # to template names. # # The default sidebars (for documents that don't match any pattern) are # defined by theme itself. Builtin themes are using these templates by # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', # 'searchbox.html']``. # # html_sidebars = {} # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. htmlhelp_basename = "sentry-pythondoc" # -- Options for LaTeX output ------------------------------------------------ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ( master_doc, "sentry-python.tex", "sentry-python Documentation", "Sentry Team and Contributors", "manual", ) ] # -- Options for manual page output ------------------------------------------ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [(master_doc, "sentry-python", "sentry-python Documentation", [author], 1)] # -- Options for Texinfo output ---------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ( master_doc, "sentry-python", "sentry-python Documentation", author, "sentry-python", "The official Sentry SDK for Python.", "Miscellaneous", ) ] # -- Options for Epub output ------------------------------------------------- # Bibliographic Dublin Core info. 
epub_title = project # The unique identifier of the text. This can be a ISBN number # or the project homepage. # # epub_identifier = '' # A unique identification for the text. # # epub_uid = '' # A list of files that should not be packed into the epub file. epub_exclude_files = ["search.html"] intersphinx_mapping = {"python": ("https://docs.python.org/3", None)} sentry-python-2.18.0/docs/index.rst000066400000000000000000000005701471214654000172440ustar00rootroot00000000000000===================================== sentry-python - Sentry SDK for Python ===================================== This is the API documentation for `Sentry's Python SDK `_. For full documentation and other resources visit the `GitHub repository `_. .. toctree:: api integrations apidocs sentry-python-2.18.0/docs/integrations.rst000066400000000000000000000003121471214654000206350ustar00rootroot00000000000000============ Integrations ============ TBD Logging ======= .. module:: sentry_sdk.integrations.logging .. autofunction:: ignore_logger .. autoclass:: EventHandler .. autoclass:: BreadcrumbHandler sentry-python-2.18.0/mypy.ini000066400000000000000000000041521471214654000161520ustar00rootroot00000000000000[mypy] python_version = 3.11 allow_redefinition = True check_untyped_defs = True ; disallow_any_decorated = True ; disallow_any_explicit = True ; disallow_any_expr = True disallow_any_generics = True ; disallow_any_unimported = True disallow_incomplete_defs = True disallow_subclassing_any = True ; disallow_untyped_calls = True disallow_untyped_decorators = True disallow_untyped_defs = True no_implicit_optional = True strict_equality = True strict_optional = True warn_redundant_casts = True ; warn_return_any = True warn_unused_configs = True warn_unused_ignores = True ; Relaxations for code written before mypy was introduced ; ; Do not use wildcards in module paths, otherwise added modules will ; automatically have the same set of relaxed rules as the rest [mypy-cohere.*] ignore_missing_imports = True [mypy-django.*] ignore_missing_imports = True [mypy-pyramid.*] ignore_missing_imports = True [mypy-psycopg2.*] ignore_missing_imports = True [mypy-pytest.*] ignore_missing_imports = True [mypy-aiohttp.*] ignore_missing_imports = True [mypy-anthropic.*] ignore_missing_imports = True [mypy-sanic.*] ignore_missing_imports = True [mypy-tornado.*] ignore_missing_imports = True [mypy-fakeredis.*] ignore_missing_imports = True [mypy-rq.*] ignore_missing_imports = True [mypy-pyspark.*] ignore_missing_imports = True [mypy-asgiref.*] ignore_missing_imports = True [mypy-langchain_core.*] ignore_missing_imports = True [mypy-executing.*] ignore_missing_imports = True [mypy-asttokens.*] ignore_missing_imports = True [mypy-pure_eval.*] ignore_missing_imports = True [mypy-blinker.*] ignore_missing_imports = True [mypy-sentry_sdk._queue] ignore_missing_imports = True disallow_untyped_defs = False [mypy-sentry_sdk._lru_cache] disallow_untyped_defs = False [mypy-celery.app.trace] ignore_missing_imports = True [mypy-flask.signals] ignore_missing_imports = True [mypy-huey.*] ignore_missing_imports = True [mypy-openai.*] ignore_missing_imports = True [mypy-openfeature.*] ignore_missing_imports = True [mypy-huggingface_hub.*] ignore_missing_imports = True [mypy-arq.*] ignore_missing_imports = True [mypy-grpc.*] ignore_missing_imports = True sentry-python-2.18.0/pyproject.toml000066400000000000000000000010531471214654000173640ustar00rootroot00000000000000[tool.black] # 'extend-exclude' excludes files or directories in addition to the defaults 
extend-exclude = ''' # A regex preceded with ^/ will apply only to files and directories # in the root of the project. ( .*_pb2.py # exclude autogenerated Protocol Buffer files anywhere in the project | .*_pb2_grpc.py # exclude autogenerated Protocol Buffer files anywhere in the project ) ''' [tool.coverage.run] branch = true omit = [ "/tmp/*", "*/tests/*", "*/.venv/*", ] [tool.coverage.report] exclude_also = [ "if TYPE_CHECKING:", ]sentry-python-2.18.0/pytest.ini000066400000000000000000000006301471214654000165010ustar00rootroot00000000000000[pytest] addopts = -vvv -rfEs -s --durations=5 --cov=./sentry_sdk --cov-branch --cov-report= --tb=short --junitxml=.junitxml asyncio_mode = strict markers = tests_internal_exceptions: Handle internal exceptions just as the SDK does, to test it. (Otherwise internal exceptions are recorded and reraised.) [pytest-watch] verbose = True nobeep = True ; Enable this to drop into pdb on errors ; pdb = True sentry-python-2.18.0/requirements-aws-lambda-layer.txt000066400000000000000000000003361471214654000230570ustar00rootroot00000000000000certifi # In Lambda functions botocore is used, and botocore is not # yet supporting urllib3 1.27.0 never mind 2+. # So we pin this here to make our Lambda layer work with # Lambda Function using Python 3.7+ urllib3<1.27 sentry-python-2.18.0/requirements-devenv.txt000066400000000000000000000002541471214654000212230ustar00rootroot00000000000000-r requirements-linting.txt -r requirements-testing.txt mockupdb # required by `pymongo` tests that are enabled by `pymongo` from linter requirements pytest pytest-asyncio sentry-python-2.18.0/requirements-docs.txt000066400000000000000000000001271471214654000206630ustar00rootroot00000000000000gevent shibuya sphinx sphinx-autodoc-typehints[type_comments]>=1.8.0 typing-extensions sentry-python-2.18.0/requirements-linting.txt000066400000000000000000000006561471214654000214060ustar00rootroot00000000000000mypy black flake8==5.0.4 # flake8 depends on pyflakes>=3.0.0 and this dropped support for Python 2 "# type:" comments types-certifi types-protobuf types-gevent types-greenlet types-redis types-setuptools types-webob opentelemetry-distro pymongo # There is no separate types module. loguru # There is no separate types module. flake8-bugbear pep8-naming pre-commit # local linting httpcore openfeature-sdk launchdarkly-server-sdk sentry-python-2.18.0/requirements-testing.txt000066400000000000000000000002521471214654000214070ustar00rootroot00000000000000pip pytest pytest-cov pytest-forked pytest-localserver pytest-watch jsonschema pyrsistent executing asttokens responses pysocks socksio httpcore[http2] setuptools Brotli sentry-python-2.18.0/scripts/000077500000000000000000000000001471214654000161405ustar00rootroot00000000000000sentry-python-2.18.0/scripts/aws-attach-layer-to-lambda-function.sh000077500000000000000000000017521471214654000253330ustar00rootroot00000000000000#!/usr/bin/env bash # # Attaches the layer `SentryPythonServerlessSDK-local-dev` to a given lambda function. # set -euo pipefail # Check for argument if [ $# -eq 0 ] then SCRIPT_NAME=$(basename "$0") echo "ERROR: No argument supplied. Please give the name of a Lambda function!" echo "" echo "Usage: $SCRIPT_NAME " echo "" exit 1 fi FUNCTION_NAME=$1 echo "Getting ARN of newest Sentry lambda layer..." LAYER_ARN=$(aws lambda list-layer-versions --layer-name SentryPythonServerlessSDK-local-dev --query "LayerVersions[0].LayerVersionArn" | tr -d '"') echo "Done getting ARN of newest Sentry lambda layer $LAYER_ARN." 
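# Note: $LAYER_ARN above is a layer-version ARN. As an illustration (account id
# and version number are placeholders, not real values), it looks roughly like:
#   arn:aws:lambda:<region>:<account-id>:layer:SentryPythonServerlessSDK-local-dev:<version>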
echo "Attaching Lamba layer to function $FUNCTION_NAME..." echo "Warning: This remove all other layers!" aws lambda update-function-configuration \ --function-name "$FUNCTION_NAME" \ --layers "$LAYER_ARN" \ --no-cli-pager echo "Done attaching Lamba layer to function '$FUNCTION_NAME'." echo "All done. Have a nice day!" sentry-python-2.18.0/scripts/aws-cleanup.sh000077500000000000000000000012011471214654000207100ustar00rootroot00000000000000#!/bin/sh # # Helper script to clean up AWS Lambda functions created # by the test suite (tests/integrations/aws_lambda/test_aws.py). # # This will delete all Lambda functions named `test_function_*`. # export AWS_DEFAULT_REGION="us-east-1" export AWS_ACCESS_KEY_ID="$SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID" export AWS_SECRET_ACCESS_KEY="$SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY" for func in $(aws lambda list-functions --output text --query 'Functions[?starts_with(FunctionName, `test_`) == `true`].FunctionName'); do echo "Deleting $func" aws lambda delete-function --function-name "$func" done echo "All done! Have a nice day!" sentry-python-2.18.0/scripts/aws-delete-lamba-layer-versions.sh000077500000000000000000000007201471214654000245620ustar00rootroot00000000000000#!/usr/bin/env bash # # Deletes all versions of the layer specified in LAYER_NAME in one region. # set -euo pipefail # override default AWS region export AWS_REGION=eu-central-1 LAYER_NAME=SentryPythonServerlessSDK-local-dev VERSION="0" while [[ $VERSION != "1" ]] do VERSION=$(aws lambda list-layer-versions --layer-name $LAYER_NAME | jq '.LayerVersions[0].Version') aws lambda delete-layer-version --layer-name $LAYER_NAME --version-number $VERSION done sentry-python-2.18.0/scripts/aws-deploy-local-layer.sh000077500000000000000000000020511471214654000227630ustar00rootroot00000000000000#!/usr/bin/env bash # # Builds and deploys the Sentry AWS Lambda layer (including the Sentry SDK and the Sentry Lambda Extension) # # The currently checked out version of the SDK in your local directory is used. # The latest version of the Lambda Extension is fetched from the Sentry Release Registry. # set -euo pipefail # Creating Lambda layer echo "Creating Lambda layer in ./dist ..." make aws-lambda-layer echo "Done creating Lambda layer in ./dist" # Deploying zipped Lambda layer to AWS ZIP=$(ls dist | grep serverless | head -n 1) echo "Deploying zipped Lambda layer $ZIP to AWS..." aws lambda publish-layer-version \ --layer-name "SentryPythonServerlessSDK-local-dev" \ --region "eu-central-1" \ --zip-file "fileb://dist/$ZIP" \ --description "Local test build of SentryPythonServerlessSDK (can be deleted)" \ --compatible-runtimes python3.7 python3.8 python3.9 python3.10 python3.11 \ --no-cli-pager echo "Done deploying zipped Lambda layer to AWS as 'SentryPythonServerlessSDK-local-dev'." echo "All done. Have a nice day!" 
sentry-python-2.18.0/scripts/aws_lambda_functions/000077500000000000000000000000001471214654000223225ustar00rootroot00000000000000sentry-python-2.18.0/scripts/aws_lambda_functions/README.md000066400000000000000000000002241471214654000235770ustar00rootroot00000000000000aws_lambda_functions ==================== In this directory you can place AWS Lambda functions that are used for administrative tasks (or whatever)sentry-python-2.18.0/scripts/aws_lambda_functions/sentryPythonDeleteTestFunctions/000077500000000000000000000000001471214654000307245ustar00rootroot00000000000000sentry-python-2.18.0/scripts/aws_lambda_functions/sentryPythonDeleteTestFunctions/README.md000066400000000000000000000013241471214654000322030ustar00rootroot00000000000000sentryPythonDeleteTestFunctions =============================== This AWS Lambda function deletes all AWS Lambda functions in the current AWS account that are prefixed with `test_`. The functions that are deleted are created by the Google Actions CI checks running on every PR of the `sentry-python` repository. The Lambda function has been deployed here: - AWS Account ID: `943013980633` - Region: `us-east-1` - Function ARN: `arn:aws:lambda:us-east-1:943013980633:function:sentryPythonDeleteTestFunctions` This function also emits Sentry Metrics and Sentry Crons checkins to the `sentry-python` project in the `Sentry SDKs` organisation on Sentry.io: https://sentry-sdks.sentry.io/projects/sentry-python/?project=5461230sentry-python-2.18.0/scripts/aws_lambda_functions/sentryPythonDeleteTestFunctions/lambda_function.py000066400000000000000000000030501471214654000344210ustar00rootroot00000000000000import boto3 import sentry_sdk monitor_slug = "python-sdk-aws-lambda-tests-cleanup" monitor_config = { "schedule": { "type": "crontab", "value": "0 12 * * 0", # 12 o'clock on Sunday }, "timezone": "UTC", "checkin_margin": 2, "max_runtime": 20, "failure_issue_threshold": 1, "recovery_threshold": 1, } @sentry_sdk.crons.monitor(monitor_slug=monitor_slug) def delete_lambda_functions(prefix="test_"): """ Delete all AWS Lambda functions in the current account where the function name matches the prefix """ client = boto3.client("lambda", region_name="us-east-1") functions_deleted = 0 functions_paginator = client.get_paginator("list_functions") for functions_page in functions_paginator.paginate(): for func in functions_page["Functions"]: function_name = func["FunctionName"] if function_name.startswith(prefix): try: response = client.delete_function( FunctionName=func["FunctionArn"], ) functions_deleted += 1 except Exception as ex: print(f"Got exception: {ex}") return functions_deleted def lambda_handler(event, context): functions_deleted = delete_lambda_functions() sentry_sdk.metrics.gauge( key="num_aws_functions_deleted", value=functions_deleted, ) return { "statusCode": 200, "body": f"{functions_deleted} AWS Lambda functions deleted successfully.", } sentry-python-2.18.0/scripts/build_aws_lambda_layer.py000066400000000000000000000111451471214654000231610ustar00rootroot00000000000000import os import shutil import subprocess import sys import tempfile from typing import TYPE_CHECKING from sentry_sdk.consts import VERSION as SDK_VERSION if TYPE_CHECKING: from typing import Optional DIST_PATH = "dist" # created by "make dist" that is called by "make aws-lambda-layer" PYTHON_SITE_PACKAGES = "python" # see https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html#configuration-layers-path class LayerBuilder: def __init__( self, base_dir, # type: str 
        out_zip_filename=None,  # type: Optional[str]
    ):
        # type: (...) -> None
        self.base_dir = base_dir
        self.python_site_packages = os.path.join(self.base_dir, PYTHON_SITE_PACKAGES)
        self.out_zip_filename = (
            f"sentry-python-serverless-{SDK_VERSION}.zip"
            if out_zip_filename is None
            else out_zip_filename
        )

    def make_directories(self):
        # type: (...) -> None
        os.makedirs(self.python_site_packages)

    def install_python_packages(self):
        # type: (...) -> None
        # Install requirements for Lambda Layer (these are more limited than the SDK requirements,
        # because Lambda does not support the newest versions of some packages)
        subprocess.check_call(
            [
                sys.executable,
                "-m",
                "pip",
                "install",
                "-r",
                "requirements-aws-lambda-layer.txt",
                "--target",
                self.python_site_packages,
            ],
        )

        sentry_python_sdk = os.path.join(
            DIST_PATH,
            f"sentry_sdk-{SDK_VERSION}-py2.py3-none-any.whl",  # this is generated by "make dist" that is called by "make aws-lambda-layer"
        )
        subprocess.run(
            [
                "pip",
                "install",
                "--no-cache-dir",  # always access PyPI
                "--no-deps",  # the right dependencies have been installed in the call above
                "--quiet",
                sentry_python_sdk,
                "--target",
                self.python_site_packages,
            ],
            check=True,
        )

    def create_init_serverless_sdk_package(self):
        # type: (...) -> None
        """
        Method that creates the init_serverless_sdk pkg in the sentry-python-serverless zip
        """
        serverless_sdk_path = (
            f"{self.python_site_packages}/sentry_sdk/"
            f"integrations/init_serverless_sdk"
        )
        if not os.path.exists(serverless_sdk_path):
            os.makedirs(serverless_sdk_path)
        shutil.copy(
            "scripts/init_serverless_sdk.py", f"{serverless_sdk_path}/__init__.py"
        )

    def zip(self):
        # type: (...) -> None
        subprocess.run(
            [
                "zip",
                "-q",  # Quiet
                "-x",  # Exclude files
                "**/__pycache__/*",  # Files to be excluded
                "-r",  # Recurse paths
                self.out_zip_filename,  # Output filename
                PYTHON_SITE_PACKAGES,  # Files to be zipped
            ],
            cwd=self.base_dir,
            check=True,  # Raises CalledProcessError if exit status is non-zero
        )

        shutil.copy(
            os.path.join(self.base_dir, self.out_zip_filename),
            os.path.abspath(DIST_PATH),
        )


def build_packaged_zip(base_dir=None, make_dist=False, out_zip_filename=None):
    if base_dir is None:
        base_dir = tempfile.mkdtemp()

    if make_dist:
        # Same thing that is done by "make dist"
        # (which is a dependency of "make aws-lambda-layer")
        subprocess.check_call(
            [sys.executable, "setup.py", "sdist", "bdist_wheel", "-d", DIST_PATH],
        )

    layer_builder = LayerBuilder(base_dir, out_zip_filename=out_zip_filename)
    layer_builder.make_directories()
    layer_builder.install_python_packages()
    layer_builder.create_init_serverless_sdk_package()
    layer_builder.zip()

    # Just for debugging
    dist_path = os.path.abspath(DIST_PATH)
    print("Created Lambda Layer package with this information:")
    print(" - Base directory for generating package: {}".format(layer_builder.base_dir))
    print(
        " - Created Python SDK distribution (in `{}`): {}".format(dist_path, make_dist)
    )
    if not make_dist:
        print("   If 'False' we assume it was already created (by 'make dist')")
    print(" - Package zip filename: {}".format(layer_builder.out_zip_filename))
    print(" - Copied package zip to: {}".format(dist_path))


if __name__ == "__main__":
    build_packaged_zip()
sentry-python-2.18.0/scripts/bump-version.sh000077500000000000000000000012201471214654000211200ustar00rootroot00000000000000#!/bin/bash
set -eux

if [ "$(uname -s)" != "Linux" ]; then
    echo "Please use the GitHub Action."
    exit 1
fi

SCRIPT_DIR="$( dirname "$0" )"
cd $SCRIPT_DIR/..
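# Usage sketch (the version numbers here are hypothetical, not a real release):
#   ./scripts/bump-version.sh 2.18.0 2.19.0
# The two positional arguments below are the old and the new version.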
OLD_VERSION="${1}" NEW_VERSION="${2}" echo "Current version: $OLD_VERSION" echo "Bumping version: $NEW_VERSION" function replace() { ! grep "$2" $3 perl -i -pe "s/$1/$2/g" $3 grep "$2" $3 # verify that replacement was successful } replace "version=\"$OLD_VERSION\"" "version=\"$NEW_VERSION\"" ./setup.py replace "VERSION = \"$OLD_VERSION\"" "VERSION = \"$NEW_VERSION\"" ./sentry_sdk/consts.py replace "release = \"$OLD_VERSION\"" "release = \"$NEW_VERSION\"" ./docs/conf.py sentry-python-2.18.0/scripts/init_serverless_sdk.py000066400000000000000000000053151471214654000225770ustar00rootroot00000000000000""" For manual instrumentation, The Handler function string of an aws lambda function should be added as an environment variable with a key of 'SENTRY_INITIAL_HANDLER' along with the 'DSN' Then the Handler function sstring should be replaced with 'sentry_sdk.integrations.init_serverless_sdk.sentry_lambda_handler' """ import os import sys import re import sentry_sdk from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any # Configure Sentry SDK sentry_sdk.init( dsn=os.environ["SENTRY_DSN"], integrations=[AwsLambdaIntegration(timeout_warning=True)], traces_sample_rate=float(os.environ["SENTRY_TRACES_SAMPLE_RATE"]), ) class AWSLambdaModuleLoader: DIR_PATH_REGEX = r"^(.+)\/([^\/]+)$" def __init__(self, sentry_initial_handler): try: module_path, self.handler_name = sentry_initial_handler.rsplit(".", 1) except ValueError: raise ValueError("Incorrect AWS Handler path (Not a path)") self.extract_and_load_lambda_function_module(module_path) def extract_and_load_lambda_function_module(self, module_path): """ Method that extracts and loads lambda function module from module_path """ py_version = sys.version_info if re.match(self.DIR_PATH_REGEX, module_path): # With a path like -> `scheduler/scheduler/event` # `module_name` is `event`, and `module_file_path` is `scheduler/scheduler/event.py` module_name = module_path.split(os.path.sep)[-1] module_file_path = module_path + ".py" # Supported python versions are 3.6, 3.7, 3.8 if py_version >= (3, 6): import importlib.util spec = importlib.util.spec_from_file_location( module_name, module_file_path ) self.lambda_function_module = importlib.util.module_from_spec(spec) spec.loader.exec_module(self.lambda_function_module) else: raise ValueError("Python version %s is not supported." 
                                 % py_version)
        else:
            import importlib

            self.lambda_function_module = importlib.import_module(module_path)

    def get_lambda_handler(self):
        return getattr(self.lambda_function_module, self.handler_name)


def sentry_lambda_handler(event, context):
    # type: (Any, Any) -> Any
    """
    Handler function that invokes a lambda handler whose path is defined in
    the environment variable "SENTRY_INITIAL_HANDLER"
    """
    module_loader = AWSLambdaModuleLoader(os.environ["SENTRY_INITIAL_HANDLER"])
    return module_loader.get_lambda_handler()(event, context)
sentry-python-2.18.0/scripts/runtox.sh000077500000000000000000000014631471214654000200420ustar00rootroot00000000000000#!/bin/bash

# Usage: sh scripts/runtox.sh py3.12
# Runs all environments with substring py3.12 and the given arguments for pytest

set -ex

if [ -n "$TOXPATH" ]; then
    true
elif which tox &> /dev/null; then
    TOXPATH=tox
else
    TOXPATH=./.venv/bin/tox
fi

excludelatest=false
for arg in "$@"
do
    if [ "$arg" = "--exclude-latest" ]; then
        excludelatest=true
        shift
        break
    fi
done

searchstring="$1"

if $excludelatest; then
    echo "Excluding latest"
    ENV="$($TOXPATH -l | grep -- "$searchstring" | grep -v -- '-latest' | tr $'\n' ',')"
else
    echo "Including latest"
    ENV="$($TOXPATH -l | grep -- "$searchstring" | tr $'\n' ',')"
fi

if [ -z "${ENV}" ]; then
    echo "No targets found. Skipping."
    exit 0
fi

exec $TOXPATH -p auto -o -e "$ENV" -- "${@:2}"
sentry-python-2.18.0/scripts/split-tox-gh-actions/000077500000000000000000000000001471214654000221355ustar00rootroot00000000000000sentry-python-2.18.0/scripts/split-tox-gh-actions/split-tox-gh-actions.py000077500000000000000000000222461471214654000265150ustar00rootroot00000000000000"""Split Tox to GitHub Actions

This is a small script to split a tox.ini config file into multiple GitHub actions
configuration files. This way each group of frameworks defined in tox.ini will get
its own GitHub actions configuration file which allows them to be run in parallel
in GitHub actions.

This will generate/update several configuration files, which need to be committed
to Git afterwards. Whenever tox.ini is changed, this script needs to be run.

Usage:
    python split-tox-gh-actions.py [--fail-on-changes]

If the parameter `--fail-on-changes` is set, the script will raise a RuntimeError
in case the yaml files have been changed by the script's execution. This is used
in CI to check if the yaml files represent the current tox.ini file. (And if not
the CI run fails.)
"""

import configparser
import hashlib
import sys
from collections import defaultdict
from functools import reduce
from glob import glob
from pathlib import Path

from jinja2 import Environment, FileSystemLoader


OUT_DIR = Path(__file__).resolve().parent.parent.parent / ".github" / "workflows"
TOX_FILE = Path(__file__).resolve().parent.parent.parent / "tox.ini"
TEMPLATE_DIR = Path(__file__).resolve().parent / "templates"

FRAMEWORKS_NEEDING_POSTGRES = {
    "django",
    "asyncpg",
}

FRAMEWORKS_NEEDING_REDIS = {
    "celery",
}

FRAMEWORKS_NEEDING_CLICKHOUSE = {
    "clickhouse_driver",
}

FRAMEWORKS_NEEDING_AWS = {
    "aws_lambda",
}

FRAMEWORKS_NEEDING_GITHUB_SECRETS = {
    "aws_lambda",
}

# Frameworks grouped here will be tested together to not hog all GitHub runners.
# If you add or remove a group, make sure to git rm the generated YAML file as
# well.
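# (Illustrative note, derived from write_file() further down: each group is
# rendered to .github/workflows/test-integrations-<group>.yml, so e.g. the
# "Web Frameworks 1" group below becomes test-integrations-web-frameworks-1.yml.)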
GROUPS = { "Common": [ "common", ], "AI": [ "anthropic", "cohere", "langchain", "openai", "huggingface_hub", ], "AWS Lambda": [ # this is separate from Cloud Computing because only this one test suite # needs to run with access to GitHub secrets "aws_lambda", ], "Cloud Computing": [ "boto3", "chalice", "cloud_resource_context", "gcp", ], "Data Processing": [ "arq", "beam", "celery", "dramatiq", "huey", "ray", "rq", "spark", ], "Databases": [ "asyncpg", "clickhouse_driver", "pymongo", "redis", "redis_py_cluster_legacy", "sqlalchemy", ], "GraphQL": [ "ariadne", "gql", "graphene", "strawberry", ], "Networking": [ "gevent", "grpc", "httpx", "requests", ], "Web Frameworks 1": [ "django", "flask", "starlette", "fastapi", ], "Web Frameworks 2": [ "aiohttp", "asgi", "bottle", "falcon", "litestar", "pyramid", "quart", "sanic", "starlite", "tornado", ], "Miscellaneous": [ "launchdarkly", "loguru", "openfeature", "opentelemetry", "potel", "pure_eval", "trytond", ], } ENV = Environment( loader=FileSystemLoader(TEMPLATE_DIR), ) def main(fail_on_changes): """Create one CI workflow for each framework defined in tox.ini.""" if fail_on_changes: old_hash = get_files_hash() print("Parsing tox.ini...") py_versions_pinned, py_versions_latest = parse_tox() if fail_on_changes: print("Checking if all frameworks belong in a group...") missing_frameworks = find_frameworks_missing_from_groups( py_versions_pinned, py_versions_latest ) if missing_frameworks: raise RuntimeError( "Please add the following frameworks to the corresponding group " "in `GROUPS` in `scripts/split-tox-gh-actions/split-tox-gh-actions.py: " + ", ".join(missing_frameworks) ) print("Rendering templates...") for group, frameworks in GROUPS.items(): contents = render_template( group, frameworks, py_versions_pinned, py_versions_latest ) filename = write_file(contents, group) print(f"Created {filename}") if fail_on_changes: new_hash = get_files_hash() if old_hash != new_hash: raise RuntimeError( "The yaml configuration files have changed. This means that either `tox.ini` " "or one of the constants in `split-tox-gh-actions.py` has changed " "but the changes have not been propagated to the GitHub actions config files. " "Please run `python scripts/split-tox-gh-actions/split-tox-gh-actions.py` " "locally and commit the changes of the yaml configuration files to continue. " ) print("All done. 
Have a nice day!") def parse_tox(): config = configparser.ConfigParser() config.read(TOX_FILE) lines = [ line for line in config["tox"]["envlist"].split("\n") if line.strip() and not line.strip().startswith("#") ] py_versions_pinned = defaultdict(set) py_versions_latest = defaultdict(set) for line in lines: # normalize lines line = line.strip().lower() try: # parse tox environment definition try: (raw_python_versions, framework, framework_versions) = line.split("-") except ValueError: (raw_python_versions, framework) = line.split("-") framework_versions = [] # collect python versions to test the framework in raw_python_versions = set( raw_python_versions.replace("{", "").replace("}", "").split(",") ) if "latest" in framework_versions: py_versions_latest[framework] |= raw_python_versions else: py_versions_pinned[framework] |= raw_python_versions except ValueError: print(f"ERROR reading line {line}") py_versions_pinned = _normalize_py_versions(py_versions_pinned) py_versions_latest = _normalize_py_versions(py_versions_latest) return py_versions_pinned, py_versions_latest def find_frameworks_missing_from_groups(py_versions_pinned, py_versions_latest): frameworks_in_a_group = _union(GROUPS.values()) all_frameworks = set(py_versions_pinned.keys()) | set(py_versions_latest.keys()) return all_frameworks - frameworks_in_a_group def _normalize_py_versions(py_versions): def replace_and_sort(versions): return sorted( [py.replace("py", "") for py in versions], key=lambda v: tuple(map(int, v.split("."))), ) if isinstance(py_versions, dict): normalized = defaultdict(set) normalized |= { framework: replace_and_sort(versions) for framework, versions in py_versions.items() } elif isinstance(py_versions, set): normalized = replace_and_sort(py_versions) return normalized def get_files_hash(): """Calculate a hash of all the yaml configuration files""" hasher = hashlib.md5() path_pattern = (OUT_DIR / "test-integrations-*.yml").as_posix() for file in glob(path_pattern): with open(file, "rb") as f: buf = f.read() hasher.update(buf) return hasher.hexdigest() def _union(seq): return reduce(lambda x, y: set(x) | set(y), seq) def render_template(group, frameworks, py_versions_pinned, py_versions_latest): template = ENV.get_template("base.jinja") categories = set() py_versions = defaultdict(set) for framework in frameworks: if py_versions_pinned[framework]: categories.add("pinned") py_versions["pinned"] |= set(py_versions_pinned[framework]) if py_versions_latest[framework]: categories.add("latest") py_versions["latest"] |= set(py_versions_latest[framework]) context = { "group": group, "frameworks": frameworks, "categories": sorted(categories), "needs_aws_credentials": bool(set(frameworks) & FRAMEWORKS_NEEDING_AWS), "needs_clickhouse": bool(set(frameworks) & FRAMEWORKS_NEEDING_CLICKHOUSE), "needs_postgres": bool(set(frameworks) & FRAMEWORKS_NEEDING_POSTGRES), "needs_redis": bool(set(frameworks) & FRAMEWORKS_NEEDING_REDIS), "needs_github_secrets": bool( set(frameworks) & FRAMEWORKS_NEEDING_GITHUB_SECRETS ), "py_versions": { category: [f'"{version}"' for version in _normalize_py_versions(versions)] for category, versions in py_versions.items() }, } rendered = template.render(context) rendered = postprocess_template(rendered) return rendered def postprocess_template(rendered): return "\n".join([line for line in rendered.split("\n") if line.strip()]) + "\n" def write_file(contents, group): group = group.lower().replace(" ", "-") outfile = OUT_DIR / f"test-integrations-{group}.yml" with open(outfile, "w") as file: 
file.write(contents) return outfile if __name__ == "__main__": fail_on_changes = len(sys.argv) == 2 and sys.argv[1] == "--fail-on-changes" main(fail_on_changes) sentry-python-2.18.0/scripts/split-tox-gh-actions/templates/000077500000000000000000000000001471214654000241335ustar00rootroot00000000000000sentry-python-2.18.0/scripts/split-tox-gh-actions/templates/base.jinja000066400000000000000000000033411471214654000260630ustar00rootroot00000000000000# Do not edit this file. This file is generated automatically by executing # python scripts/split-tox-gh-actions/split-tox-gh-actions.py {% with lowercase_group=group | replace(" ", "_") | lower %} name: Test {{ group }} on: push: branches: - master - release/** - sentry-sdk-2.0 {% if needs_github_secrets %} # XXX: We are using `pull_request_target` instead of `pull_request` because we want # this to run on forks with access to the secrets necessary to run the test suite. # Prefer to use `pull_request` when possible. pull_request_target: types: [labeled, opened, reopened, synchronize] {% else %} pull_request: {% endif %} # Cancel in progress workflows on pull_requests. # https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value concurrency: group: {% raw %}${{ github.workflow }}-${{ github.head_ref || github.run_id }}{% endraw %} cancel-in-progress: true permissions: contents: read {% if needs_github_secrets %} # `write` is needed to remove the `Trigger: tests using secrets` label pull-requests: write {% endif %} env: {% if needs_aws_credentials %} {% raw %} SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID: ${{ secrets.SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID }} SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY: ${{ secrets.SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY }} {% endraw %} {% endif %} BUILD_CACHE_KEY: {% raw %}${{ github.sha }}{% endraw %} CACHED_BUILD_PATHS: | {% raw %}${{ github.workspace }}/dist-serverless{% endraw %} jobs: {% if needs_github_secrets %} {% include "check_permissions.jinja" %} {% endif %} {% for category in categories %} {% include "test_group.jinja" %} {% endfor %} {% include "check_required.jinja" %} {% endwith %} sentry-python-2.18.0/scripts/split-tox-gh-actions/templates/check_permissions.jinja000066400000000000000000000020511471214654000306560ustar00rootroot00000000000000 check-permissions: name: permissions check runs-on: ubuntu-20.04 steps: - uses: actions/checkout@v4.2.1 with: persist-credentials: false - name: Check permissions on PR if: github.event_name == 'pull_request_target' run: | {% raw %} python3 -uS .github/workflows/scripts/trigger_tests_on_label.py \ --repo-id ${{ github.event.repository.id }} \ --pr ${{ github.event.number }} \ --event ${{ github.event.action }} \ --username "$ARG_USERNAME" \ --label-names "$ARG_LABEL_NAMES" {% endraw %} env: {% raw %} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # these can contain special characters ARG_USERNAME: ${{ github.event.pull_request.user.login }} ARG_LABEL_NAMES: ${{ toJSON(github.event.pull_request.labels.*.name) }} {% endraw %} - name: Check permissions on repo branch if: github.event_name == 'push' run: true sentry-python-2.18.0/scripts/split-tox-gh-actions/templates/check_required.jinja000066400000000000000000000011101471214654000301160ustar00rootroot00000000000000 check_required_tests: name: All pinned {{ group }} tests passed {% if "pinned" in categories %} needs: test-{{ group | replace(" ", "_") | lower }}-pinned {% endif %} # Always run this, even if a dependent job failed if: always() runs-on: ubuntu-20.04 steps: - name: Check for 
failures if: contains(needs.test-{{ lowercase_group }}-pinned.result, 'failure') || contains(needs.test-{{ lowercase_group }}-pinned.result, 'skipped') run: | echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1 sentry-python-2.18.0/scripts/split-tox-gh-actions/templates/test_group.jinja000066400000000000000000000071421471214654000273470ustar00rootroot00000000000000 test-{{ lowercase_group }}-{{ category }}: name: {{ group }} ({{ category }}) timeout-minutes: 30 runs-on: {% raw %}${{ matrix.os }}{% endraw %} strategy: fail-fast: false matrix: python-version: [{{ py_versions.get(category)|join(",") }}] # python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-20.04] {% if needs_github_secrets %} needs: check-permissions {% endif %} {% if needs_postgres %} services: postgres: image: postgres env: POSTGRES_PASSWORD: sentry # Set health checks to wait until postgres has started options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 # Maps tcp port 5432 on service container to the host ports: - 5432:5432 env: SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost SENTRY_PYTHON_TEST_POSTGRES_USER: postgres SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry {% endif %} steps: - uses: actions/checkout@v4.2.1 {% if needs_github_secrets %} {% raw %} with: ref: ${{ github.event.pull_request.head.sha || github.ref }} {% endraw %} {% endif %} - uses: actions/setup-python@v5 with: python-version: {% raw %}${{ matrix.python-version }}{% endraw %} allow-prereleases: true {% if needs_clickhouse %} - uses: getsentry/action-clickhouse-in-ci@v1 {% endif %} {% if needs_redis %} - name: Start Redis uses: supercharge/redis-github-action@1.8.0 {% endif %} - name: Setup Test Env run: | pip install "coverage[toml]" tox - name: Erase coverage run: | coverage erase {% for framework in frameworks %} - name: Test {{ framework }} {{ category }} run: | set -x # print commands that are executed {% if category == "pinned" %} ./scripts/runtox.sh --exclude-latest "{% raw %}py${{ matrix.python-version }}{% endraw %}-{{ framework }}" {% elif category == "latest" %} ./scripts/runtox.sh "{% raw %}py${{ matrix.python-version }}{% endraw %}-{{ framework }}-latest" {% endif %} {% endfor %} - name: Generate coverage XML (Python 3.6) if: {% raw %}${{ !cancelled() && matrix.python-version == '3.6' }}{% endraw %} run: | export COVERAGE_RCFILE=.coveragerc36 coverage combine .coverage-sentry-* coverage xml --ignore-errors - name: Generate coverage XML if: {% raw %}${{ !cancelled() && matrix.python-version != '3.6' }}{% endraw %} run: | coverage combine .coverage-sentry-* coverage xml - name: Upload coverage to Codecov if: {% raw %}${{ !cancelled() }}{% endraw %} uses: codecov/codecov-action@v4.6.0 with: token: {% raw %}${{ secrets.CODECOV_TOKEN }}{% endraw %} files: coverage.xml # make sure no plugins alter our coverage reports plugin: noop verbose: true - name: Upload test results to Codecov if: {% raw %}${{ !cancelled() }}{% endraw %} uses: codecov/test-results-action@v1 with: token: {% raw %}${{ secrets.CODECOV_TOKEN }}{% endraw %} files: .junitxml verbose: true 
sentry-python-2.18.0/sentry_sdk/000077500000000000000000000000001471214654000166365ustar00rootroot00000000000000sentry-python-2.18.0/sentry_sdk/__init__.py000066400000000000000000000022331471214654000207470ustar00rootroot00000000000000from sentry_sdk.scope import Scope from sentry_sdk.transport import Transport, HttpTransport from sentry_sdk.client import Client from sentry_sdk.api import * # noqa from sentry_sdk.consts import VERSION # noqa __all__ = [ # noqa "Hub", "Scope", "Client", "Transport", "HttpTransport", "integrations", # From sentry_sdk.api "init", "add_breadcrumb", "capture_event", "capture_exception", "capture_message", "configure_scope", "continue_trace", "flush", "get_baggage", "get_client", "get_global_scope", "get_isolation_scope", "get_current_scope", "get_current_span", "get_traceparent", "is_initialized", "isolation_scope", "last_event_id", "new_scope", "push_scope", "set_context", "set_extra", "set_level", "set_measurement", "set_tag", "set_tags", "set_user", "start_span", "start_transaction", "trace", "monitor", ] # Initialize the debug support after everything is loaded from sentry_sdk.debug import init_debug_support init_debug_support() del init_debug_support # circular imports from sentry_sdk.hub import Hub sentry-python-2.18.0/sentry_sdk/_compat.py000066400000000000000000000060541471214654000206370ustar00rootroot00000000000000import sys from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import TypeVar T = TypeVar("T") PY37 = sys.version_info[0] == 3 and sys.version_info[1] >= 7 PY38 = sys.version_info[0] == 3 and sys.version_info[1] >= 8 PY310 = sys.version_info[0] == 3 and sys.version_info[1] >= 10 PY311 = sys.version_info[0] == 3 and sys.version_info[1] >= 11 def with_metaclass(meta, *bases): # type: (Any, *Any) -> Any class MetaClass(type): def __new__(metacls, name, this_bases, d): # type: (Any, Any, Any, Any) -> Any return meta(name, bases, d) return type.__new__(MetaClass, "temporary_class", (), {}) def check_uwsgi_thread_support(): # type: () -> bool # We check two things here: # # 1. uWSGI doesn't run in threaded mode by default -- issue a warning if # that's the case. # # 2. Additionally, if uWSGI is running in preforking mode (default), it needs # the --py-call-uwsgi-fork-hooks option for the SDK to work properly. This # is because any background threads spawned before the main process is # forked are NOT CLEANED UP IN THE CHILDREN BY DEFAULT even if # --enable-threads is on. One has to explicitly provide # --py-call-uwsgi-fork-hooks to force uWSGI to run regular cpython # after-fork hooks that take care of cleaning up stale thread data. try: from uwsgi import opt # type: ignore except ImportError: return True from sentry_sdk.consts import FALSE_VALUES def enabled(option): # type: (str) -> bool value = opt.get(option, False) if isinstance(value, bool): return value if isinstance(value, bytes): try: value = value.decode() except Exception: pass return value and str(value).lower() not in FALSE_VALUES # When `threads` is passed in as a uwsgi option, # `enable-threads` is implied on. threads_enabled = "threads" in opt or enabled("enable-threads") fork_hooks_on = enabled("py-call-uwsgi-fork-hooks") lazy_mode = enabled("lazy-apps") or enabled("lazy") if lazy_mode and not threads_enabled: from warnings import warn warn( Warning( "IMPORTANT: " "We detected the use of uWSGI without thread support. " "This might lead to unexpected issues. " 'Please run uWSGI with "--enable-threads" for full support.' 
) ) return False elif not lazy_mode and (not threads_enabled or not fork_hooks_on): from warnings import warn warn( Warning( "IMPORTANT: " "We detected the use of uWSGI in preforking mode without " "thread support. This might lead to crashing workers. " 'Please run uWSGI with both "--enable-threads" and ' '"--py-call-uwsgi-fork-hooks" for full support.' ) ) return False return True sentry-python-2.18.0/sentry_sdk/_init_implementation.py000066400000000000000000000035021471214654000234170ustar00rootroot00000000000000from typing import TYPE_CHECKING import sentry_sdk if TYPE_CHECKING: from typing import Any, ContextManager, Optional import sentry_sdk.consts class _InitGuard: def __init__(self, client): # type: (sentry_sdk.Client) -> None self._client = client def __enter__(self): # type: () -> _InitGuard return self def __exit__(self, exc_type, exc_value, tb): # type: (Any, Any, Any) -> None c = self._client if c is not None: c.close() def _check_python_deprecations(): # type: () -> None # Since we're likely to deprecate Python versions in the future, I'm keeping # this handy function around. Use this to detect the Python version used and # to output logger.warning()s if it's deprecated. pass def _init(*args, **kwargs): # type: (*Optional[str], **Any) -> ContextManager[Any] """Initializes the SDK and optionally integrations. This takes the same arguments as the client constructor. """ client = sentry_sdk.Client(*args, **kwargs) sentry_sdk.get_global_scope().set_client(client) _check_python_deprecations() rv = _InitGuard(client) return rv if TYPE_CHECKING: # Make mypy, PyCharm and other static analyzers think `init` is a type to # have nicer autocompletion for params. # # Use `ClientConstructor` to define the argument types of `init` and # `ContextManager[Any]` to tell static analyzers about the return type. class init(sentry_sdk.consts.ClientConstructor, _InitGuard): # noqa: N801 pass else: # Alias `init` for actual usage. Go through the lambda indirection to throw # PyCharm off of the weakly typed signature (it would otherwise discover # both the weakly typed signature of `_init` and our faked `init` type). init = (lambda: _init)() sentry-python-2.18.0/sentry_sdk/_lru_cache.py000066400000000000000000000132631471214654000213010ustar00rootroot00000000000000""" A fork of Python 3.6's stdlib lru_cache (found in Python's 'cpython/Lib/functools.py') adapted into a data structure for single threaded uses. https://github.com/python/cpython/blob/v3.6.12/Lib/functools.py Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation; All Rights Reserved PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 -------------------------------------------- 1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and the Individual or Organization ("Licensee") accessing and otherwise using this software ("Python") in source or binary form and its associated documentation. 2. 
Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation; All Rights Reserved" are retained in Python alone or in any derivative version prepared by Licensee. 3. In the event Licensee prepares a derivative work that is based on or incorporates Python or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in any such work a brief summary of the changes made to Python. 4. PSF is making Python available to Licensee on an "AS IS" basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. 6. This License Agreement will automatically terminate upon a material breach of its terms and conditions. 7. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between PSF and Licensee. This License Agreement does not grant permission to use PSF trademarks or trade name in a trademark sense to endorse or promote products or services of Licensee, or any third party. 8. By copying, installing or otherwise using Python, Licensee agrees to be bound by the terms and conditions of this License Agreement. 
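
Illustrative usage of the ``LRUCache`` defined below (a minimal sketch; this is
an internal helper rather than public API, and the keys/values are made up)::

    cache = LRUCache(max_size=2)
    cache.set("a", 1)
    cache.set("b", 2)   # cache is now full
    cache.set("c", 3)   # evicts "a", the least recently used key
    cache.get("a")      # -> None (the default); cache.misses == 1
    cache.get("b")      # -> 2; cache.hits == 1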
""" from copy import copy SENTINEL = object() # aliases to the entries in a node PREV = 0 NEXT = 1 KEY = 2 VALUE = 3 class LRUCache: def __init__(self, max_size): assert max_size > 0 self.max_size = max_size self.full = False self.cache = {} # root of the circularly linked list to keep track of # the least recently used key self.root = [] # type: ignore # the node looks like [PREV, NEXT, KEY, VALUE] self.root[:] = [self.root, self.root, None, None] self.hits = self.misses = 0 def __copy__(self): cache = LRUCache(self.max_size) cache.full = self.full cache.cache = copy(self.cache) cache.root = copy(self.root) return cache def set(self, key, value): link = self.cache.get(key, SENTINEL) if link is not SENTINEL: # have to move the node to the front of the linked list link_prev, link_next, _key, _value = link # first remove the node from the lsnked list link_prev[NEXT] = link_next link_next[PREV] = link_prev # insert the node between the root and the last last = self.root[PREV] last[NEXT] = self.root[PREV] = link link[PREV] = last link[NEXT] = self.root # update the value link[VALUE] = value elif self.full: # reuse the root node, so update its key/value old_root = self.root old_root[KEY] = key old_root[VALUE] = value self.root = old_root[NEXT] old_key = self.root[KEY] self.root[KEY] = self.root[VALUE] = None del self.cache[old_key] self.cache[key] = old_root else: # insert new node after last last = self.root[PREV] link = [last, self.root, key, value] last[NEXT] = self.root[PREV] = self.cache[key] = link self.full = len(self.cache) >= self.max_size def get(self, key, default=None): link = self.cache.get(key, SENTINEL) if link is SENTINEL: self.misses += 1 return default # have to move the node to the front of the linked list link_prev, link_next, _key, _value = link # first remove the node from the lsnked list link_prev[NEXT] = link_next link_next[PREV] = link_prev # insert the node between the root and the last last = self.root[PREV] last[NEXT] = self.root[PREV] = link link[PREV] = last link[NEXT] = self.root self.hits += 1 return link[VALUE] def get_all(self): nodes = [] node = self.root[NEXT] while node is not self.root: nodes.append((node[KEY], node[VALUE])) node = node[NEXT] return nodes sentry-python-2.18.0/sentry_sdk/_queue.py000066400000000000000000000257601471214654000205050ustar00rootroot00000000000000""" A fork of Python 3.6's stdlib queue (found in Pythons 'cpython/Lib/queue.py') with Lock swapped out for RLock to avoid a deadlock while garbage collecting. https://github.com/python/cpython/blob/v3.6.12/Lib/queue.py See also https://codewithoutrules.com/2017/08/16/concurrency-python/ https://bugs.python.org/issue14976 https://github.com/sqlalchemy/sqlalchemy/blob/4eb747b61f0c1b1c25bdee3856d7195d10a0c227/lib/sqlalchemy/queue.py#L1 We also vendor the code to evade eventlet's broken monkeypatching, see https://github.com/getsentry/sentry-python/pull/484 Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation; All Rights Reserved PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 -------------------------------------------- 1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and the Individual or Organization ("Licensee") accessing and otherwise using this software ("Python") in source or binary form and its associated documentation. 2. 
Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation; All Rights Reserved" are retained in Python alone or in any derivative version prepared by Licensee. 3. In the event Licensee prepares a derivative work that is based on or incorporates Python or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in any such work a brief summary of the changes made to Python. 4. PSF is making Python available to Licensee on an "AS IS" basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. 6. This License Agreement will automatically terminate upon a material breach of its terms and conditions. 7. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between PSF and Licensee. This License Agreement does not grant permission to use PSF trademarks or trade name in a trademark sense to endorse or promote products or services of Licensee, or any third party. 8. By copying, installing or otherwise using Python, Licensee agrees to be bound by the terms and conditions of this License Agreement. """ import threading from collections import deque from time import time from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any __all__ = ["EmptyError", "FullError", "Queue"] class EmptyError(Exception): "Exception raised by Queue.get(block=0)/get_nowait()." pass class FullError(Exception): "Exception raised by Queue.put(block=0)/put_nowait()." pass class Queue: """Create a queue object with a given maximum size. If maxsize is <= 0, the queue size is infinite. """ def __init__(self, maxsize=0): self.maxsize = maxsize self._init(maxsize) # mutex must be held whenever the queue is mutating. All methods # that acquire mutex must release it before returning. mutex # is shared between the three conditions, so acquiring and # releasing the conditions also acquires and releases mutex. self.mutex = threading.RLock() # Notify not_empty whenever an item is added to the queue; a # thread waiting to get is notified then. self.not_empty = threading.Condition(self.mutex) # Notify not_full whenever an item is removed from the queue; # a thread waiting to put is notified then. 
self.not_full = threading.Condition(self.mutex) # Notify all_tasks_done whenever the number of unfinished tasks # drops to zero; thread waiting to join() is notified to resume self.all_tasks_done = threading.Condition(self.mutex) self.unfinished_tasks = 0 def task_done(self): """Indicate that a formerly enqueued task is complete. Used by Queue consumer threads. For each get() used to fetch a task, a subsequent call to task_done() tells the queue that the processing on the task is complete. If a join() is currently blocking, it will resume when all items have been processed (meaning that a task_done() call was received for every item that had been put() into the queue). Raises a ValueError if called more times than there were items placed in the queue. """ with self.all_tasks_done: unfinished = self.unfinished_tasks - 1 if unfinished <= 0: if unfinished < 0: raise ValueError("task_done() called too many times") self.all_tasks_done.notify_all() self.unfinished_tasks = unfinished def join(self): """Blocks until all items in the Queue have been gotten and processed. The count of unfinished tasks goes up whenever an item is added to the queue. The count goes down whenever a consumer thread calls task_done() to indicate the item was retrieved and all work on it is complete. When the count of unfinished tasks drops to zero, join() unblocks. """ with self.all_tasks_done: while self.unfinished_tasks: self.all_tasks_done.wait() def qsize(self): """Return the approximate size of the queue (not reliable!).""" with self.mutex: return self._qsize() def empty(self): """Return True if the queue is empty, False otherwise (not reliable!). This method is likely to be removed at some point. Use qsize() == 0 as a direct substitute, but be aware that either approach risks a race condition where a queue can grow before the result of empty() or qsize() can be used. To create code that needs to wait for all queued tasks to be completed, the preferred technique is to use the join() method. """ with self.mutex: return not self._qsize() def full(self): """Return True if the queue is full, False otherwise (not reliable!). This method is likely to be removed at some point. Use qsize() >= n as a direct substitute, but be aware that either approach risks a race condition where a queue can shrink before the result of full() or qsize() can be used. """ with self.mutex: return 0 < self.maxsize <= self._qsize() def put(self, item, block=True, timeout=None): """Put an item into the queue. If optional args 'block' is true and 'timeout' is None (the default), block if necessary until a free slot is available. If 'timeout' is a non-negative number, it blocks at most 'timeout' seconds and raises the FullError exception if no free slot was available within that time. Otherwise ('block' is false), put an item on the queue if a free slot is immediately available, else raise the FullError exception ('timeout' is ignored in that case). """ with self.not_full: if self.maxsize > 0: if not block: if self._qsize() >= self.maxsize: raise FullError() elif timeout is None: while self._qsize() >= self.maxsize: self.not_full.wait() elif timeout < 0: raise ValueError("'timeout' must be a non-negative number") else: endtime = time() + timeout while self._qsize() >= self.maxsize: remaining = endtime - time() if remaining <= 0.0: raise FullError() self.not_full.wait(remaining) self._put(item) self.unfinished_tasks += 1 self.not_empty.notify() def get(self, block=True, timeout=None): """Remove and return an item from the queue. 
If optional args 'block' is true and 'timeout' is None (the default), block if necessary until an item is available. If 'timeout' is a non-negative number, it blocks at most 'timeout' seconds and raises the EmptyError exception if no item was available within that time. Otherwise ('block' is false), return an item if one is immediately available, else raise the EmptyError exception ('timeout' is ignored in that case). """ with self.not_empty: if not block: if not self._qsize(): raise EmptyError() elif timeout is None: while not self._qsize(): self.not_empty.wait() elif timeout < 0: raise ValueError("'timeout' must be a non-negative number") else: endtime = time() + timeout while not self._qsize(): remaining = endtime - time() if remaining <= 0.0: raise EmptyError() self.not_empty.wait(remaining) item = self._get() self.not_full.notify() return item def put_nowait(self, item): """Put an item into the queue without blocking. Only enqueue the item if a free slot is immediately available. Otherwise raise the FullError exception. """ return self.put(item, block=False) def get_nowait(self): """Remove and return an item from the queue without blocking. Only get an item if one is immediately available. Otherwise raise the EmptyError exception. """ return self.get(block=False) # Override these methods to implement other queue organizations # (e.g. stack or priority queue). # These will only be called with appropriate locks held # Initialize the queue representation def _init(self, maxsize): self.queue = deque() # type: Any def _qsize(self): return len(self.queue) # Put a new item in the queue def _put(self, item): self.queue.append(item) # Get an item from the queue def _get(self): return self.queue.popleft() sentry-python-2.18.0/sentry_sdk/_types.py000066400000000000000000000152261471214654000205210ustar00rootroot00000000000000from typing import TYPE_CHECKING # Re-exported for compat, since code out there in the wild might use this variable. 
MYPY = TYPE_CHECKING if TYPE_CHECKING: from collections.abc import Container, MutableMapping, Sequence from datetime import datetime from types import TracebackType from typing import Any from typing import Callable from typing import Dict from typing import Mapping from typing import NotRequired from typing import Optional from typing import Tuple from typing import Type from typing import Union from typing_extensions import Literal, TypedDict class SDKInfo(TypedDict): name: str version: str packages: Sequence[Mapping[str, str]] # "critical" is an alias of "fatal" recognized by Relay LogLevelStr = Literal["fatal", "critical", "error", "warning", "info", "debug"] DurationUnit = Literal[ "nanosecond", "microsecond", "millisecond", "second", "minute", "hour", "day", "week", ] InformationUnit = Literal[ "bit", "byte", "kilobyte", "kibibyte", "megabyte", "mebibyte", "gigabyte", "gibibyte", "terabyte", "tebibyte", "petabyte", "pebibyte", "exabyte", "exbibyte", ] FractionUnit = Literal["ratio", "percent"] MeasurementUnit = Union[DurationUnit, InformationUnit, FractionUnit, str] MeasurementValue = TypedDict( "MeasurementValue", { "value": float, "unit": NotRequired[Optional[MeasurementUnit]], }, ) Event = TypedDict( "Event", { "breadcrumbs": dict[ Literal["values"], list[dict[str, Any]] ], # TODO: We can expand on this type "check_in_id": str, "contexts": dict[str, dict[str, object]], "dist": str, "duration": Optional[float], "environment": str, "errors": list[dict[str, Any]], # TODO: We can expand on this type "event_id": str, "exception": dict[ Literal["values"], list[dict[str, Any]] ], # TODO: We can expand on this type "extra": MutableMapping[str, object], "fingerprint": list[str], "level": LogLevelStr, "logentry": Mapping[str, object], "logger": str, "measurements": dict[str, MeasurementValue], "message": str, "modules": dict[str, str], "monitor_config": Mapping[str, object], "monitor_slug": Optional[str], "platform": Literal["python"], "profile": object, # Should be sentry_sdk.profiler.Profile, but we can't import that here due to circular imports "release": str, "request": dict[str, object], "sdk": Mapping[str, object], "server_name": str, "spans": list[dict[str, object]], "stacktrace": dict[ str, object ], # We access this key in the code, but I am unsure whether we ever set it "start_timestamp": datetime, "status": Optional[str], "tags": MutableMapping[ str, str ], # Tags must be less than 200 characters each "threads": dict[ Literal["values"], list[dict[str, Any]] ], # TODO: We can expand on this type "timestamp": Optional[datetime], # Must be set before sending the event "transaction": str, "transaction_info": Mapping[str, Any], # TODO: We can expand on this type "type": Literal["check_in", "transaction"], "user": dict[str, object], "_metrics_summary": dict[str, object], }, total=False, ) ExcInfo = Union[ tuple[Type[BaseException], BaseException, Optional[TracebackType]], tuple[None, None, None], ] Hint = Dict[str, Any] Breadcrumb = Dict[str, Any] BreadcrumbHint = Dict[str, Any] SamplingContext = Dict[str, Any] EventProcessor = Callable[[Event, Hint], Optional[Event]] ErrorProcessor = Callable[[Event, ExcInfo], Optional[Event]] BreadcrumbProcessor = Callable[[Breadcrumb, BreadcrumbHint], Optional[Breadcrumb]] TransactionProcessor = Callable[[Event, Hint], Optional[Event]] TracesSampler = Callable[[SamplingContext], Union[float, int, bool]] # https://github.com/python/mypy/issues/5710 NotImplementedType = Any EventDataCategory = Literal[ "default", "error", "crash", "transaction", 
"security", "attachment", "session", "internal", "profile", "profile_chunk", "metric_bucket", "monitor", "span", ] SessionStatus = Literal["ok", "exited", "crashed", "abnormal"] ContinuousProfilerMode = Literal["thread", "gevent", "unknown"] ProfilerMode = Union[ContinuousProfilerMode, Literal["sleep"]] # Type of the metric. MetricType = Literal["d", "s", "g", "c"] # Value of the metric. MetricValue = Union[int, float, str] # Internal representation of tags as a tuple of tuples (this is done in order to allow for the same key to exist # multiple times). MetricTagsInternal = Tuple[Tuple[str, str], ...] # External representation of tags as a dictionary. MetricTagValue = Union[str, int, float, None] MetricTags = Mapping[str, MetricTagValue] # Value inside the generator for the metric value. FlushedMetricValue = Union[int, float] BucketKey = Tuple[MetricType, str, MeasurementUnit, MetricTagsInternal] MetricMetaKey = Tuple[MetricType, str, MeasurementUnit] MonitorConfigScheduleType = Literal["crontab", "interval"] MonitorConfigScheduleUnit = Literal[ "year", "month", "week", "day", "hour", "minute", "second", # not supported in Sentry and will result in a warning ] MonitorConfigSchedule = TypedDict( "MonitorConfigSchedule", { "type": MonitorConfigScheduleType, "value": Union[int, str], "unit": MonitorConfigScheduleUnit, }, total=False, ) MonitorConfig = TypedDict( "MonitorConfig", { "schedule": MonitorConfigSchedule, "timezone": str, "checkin_margin": int, "max_runtime": int, "failure_issue_threshold": int, "recovery_threshold": int, }, total=False, ) HttpStatusCodeRange = Union[int, Container[int]] sentry-python-2.18.0/sentry_sdk/_werkzeug.py000066400000000000000000000072261471214654000212210ustar00rootroot00000000000000""" Copyright (c) 2007 by the Pallets team. Some rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
""" from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Dict from typing import Iterator from typing import Tuple # # `get_headers` comes from `werkzeug.datastructures.EnvironHeaders` # https://github.com/pallets/werkzeug/blob/0.14.1/werkzeug/datastructures.py#L1361 # # We need this function because Django does not give us a "pure" http header # dict. So we might as well use it for all WSGI integrations. # def _get_headers(environ): # type: (Dict[str, str]) -> Iterator[Tuple[str, str]] """ Returns only proper HTTP headers. """ for key, value in environ.items(): key = str(key) if key.startswith("HTTP_") and key not in ( "HTTP_CONTENT_TYPE", "HTTP_CONTENT_LENGTH", ): yield key[5:].replace("_", "-").title(), value elif key in ("CONTENT_TYPE", "CONTENT_LENGTH"): yield key.replace("_", "-").title(), value # # `get_host` comes from `werkzeug.wsgi.get_host` # https://github.com/pallets/werkzeug/blob/1.0.1/src/werkzeug/wsgi.py#L145 # def get_host(environ, use_x_forwarded_for=False): # type: (Dict[str, str], bool) -> str """ Return the host for the given WSGI environment. """ if use_x_forwarded_for and "HTTP_X_FORWARDED_HOST" in environ: rv = environ["HTTP_X_FORWARDED_HOST"] if environ["wsgi.url_scheme"] == "http" and rv.endswith(":80"): rv = rv[:-3] elif environ["wsgi.url_scheme"] == "https" and rv.endswith(":443"): rv = rv[:-4] elif environ.get("HTTP_HOST"): rv = environ["HTTP_HOST"] if environ["wsgi.url_scheme"] == "http" and rv.endswith(":80"): rv = rv[:-3] elif environ["wsgi.url_scheme"] == "https" and rv.endswith(":443"): rv = rv[:-4] elif environ.get("SERVER_NAME"): rv = environ["SERVER_NAME"] if (environ["wsgi.url_scheme"], environ["SERVER_PORT"]) not in ( ("https", "443"), ("http", "80"), ): rv += ":" + environ["SERVER_PORT"] else: # In spite of the WSGI spec, SERVER_NAME might not be present. 
rv = "unknown" return rv sentry-python-2.18.0/sentry_sdk/ai/000077500000000000000000000000001471214654000172275ustar00rootroot00000000000000sentry-python-2.18.0/sentry_sdk/ai/__init__.py000066400000000000000000000000001471214654000213260ustar00rootroot00000000000000sentry-python-2.18.0/sentry_sdk/ai/monitoring.py000066400000000000000000000105361471214654000217730ustar00rootroot00000000000000import inspect from functools import wraps import sentry_sdk.utils from sentry_sdk import start_span from sentry_sdk.tracing import Span from sentry_sdk.utils import ContextVar from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Optional, Callable, Any _ai_pipeline_name = ContextVar("ai_pipeline_name", default=None) def set_ai_pipeline_name(name): # type: (Optional[str]) -> None _ai_pipeline_name.set(name) def get_ai_pipeline_name(): # type: () -> Optional[str] return _ai_pipeline_name.get() def ai_track(description, **span_kwargs): # type: (str, Any) -> Callable[..., Any] def decorator(f): # type: (Callable[..., Any]) -> Callable[..., Any] def sync_wrapped(*args, **kwargs): # type: (Any, Any) -> Any curr_pipeline = _ai_pipeline_name.get() op = span_kwargs.get("op", "ai.run" if curr_pipeline else "ai.pipeline") with start_span(name=description, op=op, **span_kwargs) as span: for k, v in kwargs.pop("sentry_tags", {}).items(): span.set_tag(k, v) for k, v in kwargs.pop("sentry_data", {}).items(): span.set_data(k, v) if curr_pipeline: span.set_data("ai.pipeline.name", curr_pipeline) return f(*args, **kwargs) else: _ai_pipeline_name.set(description) try: res = f(*args, **kwargs) except Exception as e: event, hint = sentry_sdk.utils.event_from_exception( e, client_options=sentry_sdk.get_client().options, mechanism={"type": "ai_monitoring", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) raise e from None finally: _ai_pipeline_name.set(None) return res async def async_wrapped(*args, **kwargs): # type: (Any, Any) -> Any curr_pipeline = _ai_pipeline_name.get() op = span_kwargs.get("op", "ai.run" if curr_pipeline else "ai.pipeline") with start_span(name=description, op=op, **span_kwargs) as span: for k, v in kwargs.pop("sentry_tags", {}).items(): span.set_tag(k, v) for k, v in kwargs.pop("sentry_data", {}).items(): span.set_data(k, v) if curr_pipeline: span.set_data("ai.pipeline.name", curr_pipeline) return await f(*args, **kwargs) else: _ai_pipeline_name.set(description) try: res = await f(*args, **kwargs) except Exception as e: event, hint = sentry_sdk.utils.event_from_exception( e, client_options=sentry_sdk.get_client().options, mechanism={"type": "ai_monitoring", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) raise e from None finally: _ai_pipeline_name.set(None) return res if inspect.iscoroutinefunction(f): return wraps(f)(async_wrapped) else: return wraps(f)(sync_wrapped) return decorator def record_token_usage( span, prompt_tokens=None, completion_tokens=None, total_tokens=None ): # type: (Span, Optional[int], Optional[int], Optional[int]) -> None ai_pipeline_name = get_ai_pipeline_name() if ai_pipeline_name: span.set_data("ai.pipeline.name", ai_pipeline_name) if prompt_tokens is not None: span.set_measurement("ai_prompt_tokens_used", value=prompt_tokens) if completion_tokens is not None: span.set_measurement("ai_completion_tokens_used", value=completion_tokens) if ( total_tokens is None and prompt_tokens is not None and completion_tokens is not None ): total_tokens = prompt_tokens + completion_tokens if total_tokens is not None: 
span.set_measurement("ai_total_tokens_used", total_tokens) sentry-python-2.18.0/sentry_sdk/ai/utils.py000066400000000000000000000016661471214654000207520ustar00rootroot00000000000000from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from sentry_sdk.tracing import Span from sentry_sdk.utils import logger def _normalize_data(data): # type: (Any) -> Any # convert pydantic data (e.g. OpenAI v1+) to json compatible format if hasattr(data, "model_dump"): try: return data.model_dump() except Exception as e: logger.warning("Could not convert pydantic data to JSON: %s", e) return data if isinstance(data, list): if len(data) == 1: return _normalize_data(data[0]) # remove empty dimensions return list(_normalize_data(x) for x in data) if isinstance(data, dict): return {k: _normalize_data(v) for (k, v) in data.items()} return data def set_data_normalized(span, key, value): # type: (Span, str, Any) -> None normalized = _normalize_data(value) span.set_data(key, normalized) sentry-python-2.18.0/sentry_sdk/api.py000066400000000000000000000260421471214654000177650ustar00rootroot00000000000000import inspect import warnings from contextlib import contextmanager from sentry_sdk import tracing_utils, Client from sentry_sdk._init_implementation import init from sentry_sdk.consts import INSTRUMENTER from sentry_sdk.scope import Scope, _ScopeManager, new_scope, isolation_scope from sentry_sdk.tracing import NoOpSpan, Transaction, trace from sentry_sdk.crons import monitor from typing import TYPE_CHECKING if TYPE_CHECKING: from collections.abc import Mapping from typing import Any from typing import Dict from typing import Generator from typing import Optional from typing import overload from typing import Callable from typing import TypeVar from typing import ContextManager from typing import Union from typing_extensions import Unpack from sentry_sdk.client import BaseClient from sentry_sdk._types import ( Event, Hint, Breadcrumb, BreadcrumbHint, ExcInfo, MeasurementUnit, LogLevelStr, SamplingContext, ) from sentry_sdk.tracing import Span, TransactionKwargs T = TypeVar("T") F = TypeVar("F", bound=Callable[..., Any]) else: def overload(x): # type: (T) -> T return x # When changing this, update __all__ in __init__.py too __all__ = [ "init", "add_breadcrumb", "capture_event", "capture_exception", "capture_message", "configure_scope", "continue_trace", "flush", "get_baggage", "get_client", "get_global_scope", "get_isolation_scope", "get_current_scope", "get_current_span", "get_traceparent", "is_initialized", "isolation_scope", "last_event_id", "new_scope", "push_scope", "set_context", "set_extra", "set_level", "set_measurement", "set_tag", "set_tags", "set_user", "start_span", "start_transaction", "trace", "monitor", ] def scopemethod(f): # type: (F) -> F f.__doc__ = "%s\n\n%s" % ( "Alias for :py:meth:`sentry_sdk.Scope.%s`" % f.__name__, inspect.getdoc(getattr(Scope, f.__name__)), ) return f def clientmethod(f): # type: (F) -> F f.__doc__ = "%s\n\n%s" % ( "Alias for :py:meth:`sentry_sdk.Client.%s`" % f.__name__, inspect.getdoc(getattr(Client, f.__name__)), ) return f @scopemethod def get_client(): # type: () -> BaseClient return Scope.get_client() def is_initialized(): # type: () -> bool """ .. versionadded:: 2.0.0 Returns whether Sentry has been initialized or not. If a client is available and the client is active (meaning it is configured to send data) then Sentry is initialized. 
""" return get_client().is_active() @scopemethod def get_global_scope(): # type: () -> Scope return Scope.get_global_scope() @scopemethod def get_isolation_scope(): # type: () -> Scope return Scope.get_isolation_scope() @scopemethod def get_current_scope(): # type: () -> Scope return Scope.get_current_scope() @scopemethod def last_event_id(): # type: () -> Optional[str] """ See :py:meth:`sentry_sdk.Scope.last_event_id` documentation regarding this method's limitations. """ return Scope.last_event_id() @scopemethod def capture_event( event, # type: Event hint=None, # type: Optional[Hint] scope=None, # type: Optional[Any] **scope_kwargs, # type: Any ): # type: (...) -> Optional[str] return get_current_scope().capture_event(event, hint, scope=scope, **scope_kwargs) @scopemethod def capture_message( message, # type: str level=None, # type: Optional[LogLevelStr] scope=None, # type: Optional[Any] **scope_kwargs, # type: Any ): # type: (...) -> Optional[str] return get_current_scope().capture_message( message, level, scope=scope, **scope_kwargs ) @scopemethod def capture_exception( error=None, # type: Optional[Union[BaseException, ExcInfo]] scope=None, # type: Optional[Any] **scope_kwargs, # type: Any ): # type: (...) -> Optional[str] return get_current_scope().capture_exception(error, scope=scope, **scope_kwargs) @scopemethod def add_breadcrumb( crumb=None, # type: Optional[Breadcrumb] hint=None, # type: Optional[BreadcrumbHint] **kwargs, # type: Any ): # type: (...) -> None return get_isolation_scope().add_breadcrumb(crumb, hint, **kwargs) @overload def configure_scope(): # type: () -> ContextManager[Scope] pass @overload def configure_scope( # noqa: F811 callback, # type: Callable[[Scope], None] ): # type: (...) -> None pass def configure_scope( # noqa: F811 callback=None, # type: Optional[Callable[[Scope], None]] ): # type: (...) -> Optional[ContextManager[Scope]] """ Reconfigures the scope. :param callback: If provided, call the callback with the current scope. :returns: If no callback is provided, returns a context manager that returns the scope. """ warnings.warn( "sentry_sdk.configure_scope is deprecated and will be removed in the next major version. " "Please consult our migration guide to learn how to migrate to the new API: " "https://docs.sentry.io/platforms/python/migration/1.x-to-2.x#scope-configuring", DeprecationWarning, stacklevel=2, ) scope = get_isolation_scope() scope.generate_propagation_context() if callback is not None: # TODO: used to return None when client is None. Check if this changes behavior. callback(scope) return None @contextmanager def inner(): # type: () -> Generator[Scope, None, None] yield scope return inner() @overload def push_scope(): # type: () -> ContextManager[Scope] pass @overload def push_scope( # noqa: F811 callback, # type: Callable[[Scope], None] ): # type: (...) -> None pass def push_scope( # noqa: F811 callback=None, # type: Optional[Callable[[Scope], None]] ): # type: (...) -> Optional[ContextManager[Scope]] """ Pushes a new layer on the scope stack. :param callback: If provided, this method pushes a scope, calls `callback`, and pops the scope again. :returns: If no `callback` is provided, a context manager that should be used to pop the scope again. """ warnings.warn( "sentry_sdk.push_scope is deprecated and will be removed in the next major version. 
" "Please consult our migration guide to learn how to migrate to the new API: " "https://docs.sentry.io/platforms/python/migration/1.x-to-2.x#scope-pushing", DeprecationWarning, stacklevel=2, ) if callback is not None: with warnings.catch_warnings(): warnings.simplefilter("ignore", DeprecationWarning) with push_scope() as scope: callback(scope) return None return _ScopeManager() @scopemethod def set_tag(key, value): # type: (str, Any) -> None return get_isolation_scope().set_tag(key, value) @scopemethod def set_tags(tags): # type: (Mapping[str, object]) -> None return get_isolation_scope().set_tags(tags) @scopemethod def set_context(key, value): # type: (str, Dict[str, Any]) -> None return get_isolation_scope().set_context(key, value) @scopemethod def set_extra(key, value): # type: (str, Any) -> None return get_isolation_scope().set_extra(key, value) @scopemethod def set_user(value): # type: (Optional[Dict[str, Any]]) -> None return get_isolation_scope().set_user(value) @scopemethod def set_level(value): # type: (LogLevelStr) -> None return get_isolation_scope().set_level(value) @clientmethod def flush( timeout=None, # type: Optional[float] callback=None, # type: Optional[Callable[[int, float], None]] ): # type: (...) -> None return get_client().flush(timeout=timeout, callback=callback) @scopemethod def start_span( **kwargs, # type: Any ): # type: (...) -> Span return get_current_scope().start_span(**kwargs) @scopemethod def start_transaction( transaction=None, # type: Optional[Transaction] instrumenter=INSTRUMENTER.SENTRY, # type: str custom_sampling_context=None, # type: Optional[SamplingContext] **kwargs, # type: Unpack[TransactionKwargs] ): # type: (...) -> Union[Transaction, NoOpSpan] """ Start and return a transaction on the current scope. Start an existing transaction if given, otherwise create and start a new transaction with kwargs. This is the entry point to manual tracing instrumentation. A tree structure can be built by adding child spans to the transaction, and child spans to other spans. To start a new child span within the transaction or any span, call the respective `.start_child()` method. Every child span must be finished before the transaction is finished, otherwise the unfinished spans are discarded. When used as context managers, spans and transactions are automatically finished at the end of the `with` block. If not using context managers, call the `.finish()` method. When the transaction is finished, it will be sent to Sentry with all its finished child spans. :param transaction: The transaction to start. If omitted, we create and start a new transaction. :param instrumenter: This parameter is meant for internal use only. It will be removed in the next major version. :param custom_sampling_context: The transaction's custom sampling context. :param kwargs: Optional keyword arguments to be passed to the Transaction constructor. See :py:class:`sentry_sdk.tracing.Transaction` for available arguments. 
""" return get_current_scope().start_transaction( transaction, instrumenter, custom_sampling_context, **kwargs ) def set_measurement(name, value, unit=""): # type: (str, float, MeasurementUnit) -> None transaction = get_current_scope().transaction if transaction is not None: transaction.set_measurement(name, value, unit) def get_current_span(scope=None): # type: (Optional[Scope]) -> Optional[Span] """ Returns the currently active span if there is one running, otherwise `None` """ return tracing_utils.get_current_span(scope) def get_traceparent(): # type: () -> Optional[str] """ Returns the traceparent either from the active span or from the scope. """ return get_current_scope().get_traceparent() def get_baggage(): # type: () -> Optional[str] """ Returns Baggage either from the active span or from the scope. """ baggage = get_current_scope().get_baggage() if baggage is not None: return baggage.serialize() return None def continue_trace( environ_or_headers, op=None, name=None, source=None, origin="manual" ): # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str], str) -> Transaction """ Sets the propagation context from environment or headers and returns a transaction. """ return get_isolation_scope().continue_trace( environ_or_headers, op, name, source, origin ) sentry-python-2.18.0/sentry_sdk/attachments.py000066400000000000000000000060451471214654000215300ustar00rootroot00000000000000import os import mimetypes from sentry_sdk.envelope import Item, PayloadRef from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Optional, Union, Callable class Attachment: """Additional files/data to send along with an event. This class stores attachments that can be sent along with an event. Attachments are files or other data, e.g. config or log files, that are relevant to an event. Attachments are set on the ``Scope``, and are sent along with all non-transaction events (or all events including transactions if ``add_to_transactions`` is ``True``) that are captured within the ``Scope``. To add an attachment to a ``Scope``, use :py:meth:`sentry_sdk.Scope.add_attachment`. The parameters for ``add_attachment`` are the same as the parameters for this class's constructor. :param bytes: Raw bytes of the attachment, or a function that returns the raw bytes. Must be provided unless ``path`` is provided. :param filename: The filename of the attachment. Must be provided unless ``path`` is provided. :param path: Path to a file to attach. Must be provided unless ``bytes`` is provided. :param content_type: The content type of the attachment. If not provided, it will be guessed from the ``filename`` parameter, if available, or the ``path`` parameter if ``filename`` is ``None``. :param add_to_transactions: Whether to add this attachment to transactions. Defaults to ``False``. """ def __init__( self, bytes=None, # type: Union[None, bytes, Callable[[], bytes]] filename=None, # type: Optional[str] path=None, # type: Optional[str] content_type=None, # type: Optional[str] add_to_transactions=False, # type: bool ): # type: (...) 
-> None
        if bytes is None and path is None:
            raise TypeError("path or raw bytes required for attachment")
        if filename is None and path is not None:
            filename = os.path.basename(path)
        if filename is None:
            raise TypeError("filename is required for attachment")
        if content_type is None:
            content_type = mimetypes.guess_type(filename)[0]
        self.bytes = bytes
        self.filename = filename
        self.path = path
        self.content_type = content_type
        self.add_to_transactions = add_to_transactions

    def to_envelope_item(self):
        # type: () -> Item
        """Returns an envelope item for this attachment."""
        payload = None  # type: Union[None, PayloadRef, bytes]
        if self.bytes is not None:
            if callable(self.bytes):
                payload = self.bytes()
            else:
                payload = self.bytes
        else:
            payload = PayloadRef(path=self.path)

        return Item(
            payload=payload,
            type="attachment",
            content_type=self.content_type,
            filename=self.filename,
        )

    def __repr__(self):
        # type: () -> str
        return "<Attachment %r>" % (self.filename,)
sentry-python-2.18.0/sentry_sdk/client.py
import os
import uuid
import random
import socket

from collections.abc import Mapping
from datetime import datetime, timezone
from importlib import import_module
from typing import cast, overload

from sentry_sdk._compat import PY37, check_uwsgi_thread_support
from sentry_sdk.utils import (
    ContextVar,
    capture_internal_exceptions,
    current_stacktrace,
    env_to_bool,
    format_timestamp,
    get_sdk_name,
    get_type_name,
    get_default_release,
    handle_in_app,
    is_gevent,
    logger,
)
from sentry_sdk.serializer import serialize
from sentry_sdk.tracing import trace
from sentry_sdk.transport import BaseHttpTransport, make_transport
from sentry_sdk.consts import (
    DEFAULT_MAX_VALUE_LENGTH,
    DEFAULT_OPTIONS,
    INSTRUMENTER,
    VERSION,
    ClientConstructor,
)
from sentry_sdk.integrations import _DEFAULT_INTEGRATIONS, setup_integrations
from sentry_sdk.sessions import SessionFlusher
from sentry_sdk.envelope import Envelope
from sentry_sdk.profiler.continuous_profiler import setup_continuous_profiler
from sentry_sdk.profiler.transaction_profiler import (
    has_profiling_enabled,
    Profile,
    setup_profiler,
)
from sentry_sdk.scrubber import EventScrubber
from sentry_sdk.monitor import Monitor
from sentry_sdk.spotlight import setup_spotlight

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Any
    from typing import Callable
    from typing import Dict
    from typing import Optional
    from typing import Sequence
    from typing import Type
    from typing import Union
    from typing import TypeVar

    from sentry_sdk._types import Event, Hint, SDKInfo
    from sentry_sdk.integrations import Integration
    from sentry_sdk.metrics import MetricsAggregator
    from sentry_sdk.scope import Scope
    from sentry_sdk.session import Session
    from sentry_sdk.spotlight import SpotlightClient
    from sentry_sdk.transport import Transport

    I = TypeVar("I", bound=Integration)  # noqa: E741


_client_init_debug = ContextVar("client_init_debug")


SDK_INFO = {
    "name": "sentry.python",  # SDK name will be overridden after integrations have been loaded with sentry_sdk.integrations.setup_integrations()
    "version": VERSION,
    "packages": [{"name": "pypi:sentry-sdk", "version": VERSION}],
}  # type: SDKInfo


def _get_options(*args, **kwargs):
    # type: (*Optional[str], **Any) -> Dict[str, Any]
    if args and (isinstance(args[0], (bytes, str)) or args[0] is None):
        dsn = args[0]  # type: Optional[str]
        args = args[1:]
    else:
        dsn = None

    if len(args) > 1:
        raise TypeError("Only single positional argument is expected")

    rv = dict(DEFAULT_OPTIONS)
    options =
dict(*args, **kwargs) if dsn is not None and options.get("dsn") is None: options["dsn"] = dsn for key, value in options.items(): if key not in rv: raise TypeError("Unknown option %r" % (key,)) rv[key] = value if rv["dsn"] is None: rv["dsn"] = os.environ.get("SENTRY_DSN") if rv["release"] is None: rv["release"] = get_default_release() if rv["environment"] is None: rv["environment"] = os.environ.get("SENTRY_ENVIRONMENT") or "production" if rv["debug"] is None: rv["debug"] = env_to_bool(os.environ.get("SENTRY_DEBUG", "False"), strict=True) if rv["server_name"] is None and hasattr(socket, "gethostname"): rv["server_name"] = socket.gethostname() if rv["instrumenter"] is None: rv["instrumenter"] = INSTRUMENTER.SENTRY if rv["project_root"] is None: try: project_root = os.getcwd() except Exception: project_root = None rv["project_root"] = project_root if rv["enable_tracing"] is True and rv["traces_sample_rate"] is None: rv["traces_sample_rate"] = 1.0 if rv["event_scrubber"] is None: rv["event_scrubber"] = EventScrubber(send_default_pii=rv["send_default_pii"]) if rv["socket_options"] and not isinstance(rv["socket_options"], list): logger.warning( "Ignoring socket_options because of unexpected format. See urllib3.HTTPConnection.socket_options for the expected format." ) rv["socket_options"] = None return rv try: # Python 3.6+ module_not_found_error = ModuleNotFoundError except Exception: # Older Python versions module_not_found_error = ImportError # type: ignore class BaseClient: """ .. versionadded:: 2.0.0 The basic definition of a client that is used for sending data to Sentry. """ spotlight = None # type: Optional[SpotlightClient] def __init__(self, options=None): # type: (Optional[Dict[str, Any]]) -> None self.options = ( options if options is not None else DEFAULT_OPTIONS ) # type: Dict[str, Any] self.transport = None # type: Optional[Transport] self.monitor = None # type: Optional[Monitor] self.metrics_aggregator = None # type: Optional[MetricsAggregator] def __getstate__(self, *args, **kwargs): # type: (*Any, **Any) -> Any return {"options": {}} def __setstate__(self, *args, **kwargs): # type: (*Any, **Any) -> None pass @property def dsn(self): # type: () -> Optional[str] return None def should_send_default_pii(self): # type: () -> bool return False def is_active(self): # type: () -> bool """ .. versionadded:: 2.0.0 Returns whether the client is active (able to send data to Sentry) """ return False def capture_event(self, *args, **kwargs): # type: (*Any, **Any) -> Optional[str] return None def capture_session(self, *args, **kwargs): # type: (*Any, **Any) -> None return None if TYPE_CHECKING: @overload def get_integration(self, name_or_class): # type: (str) -> Optional[Integration] ... @overload def get_integration(self, name_or_class): # type: (type[I]) -> Optional[I] ... def get_integration(self, name_or_class): # type: (Union[str, type[Integration]]) -> Optional[Integration] return None def close(self, *args, **kwargs): # type: (*Any, **Any) -> None return None def flush(self, *args, **kwargs): # type: (*Any, **Any) -> None return None def __enter__(self): # type: () -> BaseClient return self def __exit__(self, exc_type, exc_value, tb): # type: (Any, Any, Any) -> None return None class NonRecordingClient(BaseClient): """ .. versionadded:: 2.0.0 A client that does not send any events to Sentry. This is used as a fallback when the Sentry SDK is not yet initialized. 
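
    Illustrative behaviour (a sketch)::

        client = sentry_sdk.get_client()  # NonRecordingClient before init()
        client.is_active()                # -> False
        client.capture_event({})          # no-op, returns None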
""" pass class _Client(BaseClient): """ The client is internally responsible for capturing the events and forwarding them to sentry through the configured transport. It takes the client options as keyword arguments and optionally the DSN as first argument. Alias of :py:class:`sentry_sdk.Client`. (Was created for better intelisense support) """ def __init__(self, *args, **kwargs): # type: (*Any, **Any) -> None super(_Client, self).__init__(options=get_options(*args, **kwargs)) self._init_impl() def __getstate__(self): # type: () -> Any return {"options": self.options} def __setstate__(self, state): # type: (Any) -> None self.options = state["options"] self._init_impl() def _setup_instrumentation(self, functions_to_trace): # type: (Sequence[Dict[str, str]]) -> None """ Instruments the functions given in the list `functions_to_trace` with the `@sentry_sdk.tracing.trace` decorator. """ for function in functions_to_trace: class_name = None function_qualname = function["qualified_name"] module_name, function_name = function_qualname.rsplit(".", 1) try: # Try to import module and function # ex: "mymodule.submodule.funcname" module_obj = import_module(module_name) function_obj = getattr(module_obj, function_name) setattr(module_obj, function_name, trace(function_obj)) logger.debug("Enabled tracing for %s", function_qualname) except module_not_found_error: try: # Try to import a class # ex: "mymodule.submodule.MyClassName.member_function" module_name, class_name = module_name.rsplit(".", 1) module_obj = import_module(module_name) class_obj = getattr(module_obj, class_name) function_obj = getattr(class_obj, function_name) function_type = type(class_obj.__dict__[function_name]) traced_function = trace(function_obj) if function_type in (staticmethod, classmethod): traced_function = staticmethod(traced_function) setattr(class_obj, function_name, traced_function) setattr(module_obj, class_name, class_obj) logger.debug("Enabled tracing for %s", function_qualname) except Exception as e: logger.warning( "Can not enable tracing for '%s'. (%s) Please check your `functions_to_trace` parameter.", function_qualname, e, ) except Exception as e: logger.warning( "Can not enable tracing for '%s'. (%s) Please check your `functions_to_trace` parameter.", function_qualname, e, ) def _init_impl(self): # type: () -> None old_debug = _client_init_debug.get(False) def _capture_envelope(envelope): # type: (Envelope) -> None if self.transport is not None: self.transport.capture_envelope(envelope) try: _client_init_debug.set(self.options["debug"]) self.transport = make_transport(self.options) self.monitor = None if self.transport: if self.options["enable_backpressure_handling"]: self.monitor = Monitor(self.transport) self.session_flusher = SessionFlusher(capture_func=_capture_envelope) self.metrics_aggregator = None # type: Optional[MetricsAggregator] experiments = self.options.get("_experiments", {}) if experiments.get("enable_metrics", True): # Context vars are not working correctly on Python <=3.6 # with gevent. metrics_supported = not is_gevent() or PY37 if metrics_supported: from sentry_sdk.metrics import MetricsAggregator self.metrics_aggregator = MetricsAggregator( capture_func=_capture_envelope, enable_code_locations=bool( experiments.get("metric_code_locations", True) ), ) else: logger.info( "Metrics not supported on Python 3.6 and lower with gevent." 
) max_request_body_size = ("always", "never", "small", "medium") if self.options["max_request_body_size"] not in max_request_body_size: raise ValueError( "Invalid value for max_request_body_size. Must be one of {}".format( max_request_body_size ) ) if self.options["_experiments"].get("otel_powered_performance", False): logger.debug( "[OTel] Enabling experimental OTel-powered performance monitoring." ) self.options["instrumenter"] = INSTRUMENTER.OTEL if ( "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration" not in _DEFAULT_INTEGRATIONS ): _DEFAULT_INTEGRATIONS.append( "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration", ) self.integrations = setup_integrations( self.options["integrations"], with_defaults=self.options["default_integrations"], with_auto_enabling_integrations=self.options[ "auto_enabling_integrations" ], disabled_integrations=self.options["disabled_integrations"], ) spotlight_config = self.options.get("spotlight") if spotlight_config is None and "SENTRY_SPOTLIGHT" in os.environ: spotlight_env_value = os.environ["SENTRY_SPOTLIGHT"] spotlight_config = env_to_bool(spotlight_env_value, strict=True) self.options["spotlight"] = ( spotlight_config if spotlight_config is not None else spotlight_env_value ) if self.options.get("spotlight"): self.spotlight = setup_spotlight(self.options) sdk_name = get_sdk_name(list(self.integrations.keys())) SDK_INFO["name"] = sdk_name logger.debug("Setting SDK name to '%s'", sdk_name) if has_profiling_enabled(self.options): try: setup_profiler(self.options) except Exception as e: logger.debug("Can not set up profiler. (%s)", e) else: try: setup_continuous_profiler( self.options, sdk_info=SDK_INFO, capture_func=_capture_envelope, ) except Exception as e: logger.debug("Can not set up continuous profiler. (%s)", e) finally: _client_init_debug.set(old_debug) self._setup_instrumentation(self.options.get("functions_to_trace", [])) if ( self.monitor or self.metrics_aggregator or has_profiling_enabled(self.options) or isinstance(self.transport, BaseHttpTransport) ): # If we have anything on that could spawn a background thread, we # need to check if it's safe to use them. check_uwsgi_thread_support() def is_active(self): # type: () -> bool """ .. versionadded:: 2.0.0 Returns whether the client is active (able to send data to Sentry) """ return True def should_send_default_pii(self): # type: () -> bool """ .. versionadded:: 2.0.0 Returns whether the client should send default PII (Personally Identifiable Information) data to Sentry. """ return self.options.get("send_default_pii", False) @property def dsn(self): # type: () -> Optional[str] """Returns the configured DSN as string.""" return self.options["dsn"] def _prepare_event( self, event, # type: Event hint, # type: Hint scope, # type: Optional[Scope] ): # type: (...) 
-> Optional[Event] if event.get("timestamp") is None: event["timestamp"] = datetime.now(timezone.utc) if scope is not None: is_transaction = event.get("type") == "transaction" spans_before = len(event.get("spans", [])) event_ = scope.apply_to_event(event, hint, self.options) # one of the event/error processors returned None if event_ is None: if self.transport: self.transport.record_lost_event( "event_processor", data_category=("transaction" if is_transaction else "error"), ) if is_transaction: self.transport.record_lost_event( "event_processor", data_category="span", quantity=spans_before + 1, # +1 for the transaction itself ) return None event = event_ spans_delta = spans_before - len(event.get("spans", [])) if is_transaction and spans_delta > 0 and self.transport is not None: self.transport.record_lost_event( "event_processor", data_category="span", quantity=spans_delta ) if ( self.options["attach_stacktrace"] and "exception" not in event and "stacktrace" not in event and "threads" not in event ): with capture_internal_exceptions(): event["threads"] = { "values": [ { "stacktrace": current_stacktrace( include_local_variables=self.options.get( "include_local_variables", True ), max_value_length=self.options.get( "max_value_length", DEFAULT_MAX_VALUE_LENGTH ), ), "crashed": False, "current": True, } ] } for key in "release", "environment", "server_name", "dist": if event.get(key) is None and self.options[key] is not None: event[key] = str(self.options[key]).strip() # type: ignore[literal-required] if event.get("sdk") is None: sdk_info = dict(SDK_INFO) sdk_info["integrations"] = sorted(self.integrations.keys()) event["sdk"] = sdk_info if event.get("platform") is None: event["platform"] = "python" event = handle_in_app( event, self.options["in_app_exclude"], self.options["in_app_include"], self.options["project_root"], ) if event is not None: event_scrubber = self.options["event_scrubber"] if event_scrubber: event_scrubber.scrub_event(event) # Postprocess the event here so that annotated types do # generally not surface in before_send if event is not None: event = cast( "Event", serialize( cast("Dict[str, Any]", event), max_request_body_size=self.options.get("max_request_body_size"), max_value_length=self.options.get("max_value_length"), custom_repr=self.options.get("custom_repr"), ), ) before_send = self.options["before_send"] if ( before_send is not None and event is not None and event.get("type") != "transaction" ): new_event = None with capture_internal_exceptions(): new_event = before_send(event, hint or {}) if new_event is None: logger.info("before send dropped event") if self.transport: self.transport.record_lost_event( "before_send", data_category="error" ) event = new_event # type: ignore before_send_transaction = self.options["before_send_transaction"] if ( before_send_transaction is not None and event is not None and event.get("type") == "transaction" ): new_event = None spans_before = len(event.get("spans", [])) with capture_internal_exceptions(): new_event = before_send_transaction(event, hint or {}) if new_event is None: logger.info("before send transaction dropped event") if self.transport: self.transport.record_lost_event( reason="before_send", data_category="transaction" ) self.transport.record_lost_event( reason="before_send", data_category="span", quantity=spans_before + 1, # +1 for the transaction itself ) else: spans_delta = spans_before - len(new_event.get("spans", [])) if spans_delta > 0 and self.transport is not None: self.transport.record_lost_event( 
reason="before_send", data_category="span", quantity=spans_delta ) event = new_event # type: ignore return event def _is_ignored_error(self, event, hint): # type: (Event, Hint) -> bool exc_info = hint.get("exc_info") if exc_info is None: return False error = exc_info[0] error_type_name = get_type_name(exc_info[0]) error_full_name = "%s.%s" % (exc_info[0].__module__, error_type_name) for ignored_error in self.options["ignore_errors"]: # String types are matched against the type name in the # exception only if isinstance(ignored_error, str): if ignored_error == error_full_name or ignored_error == error_type_name: return True else: if issubclass(error, ignored_error): return True return False def _should_capture( self, event, # type: Event hint, # type: Hint scope=None, # type: Optional[Scope] ): # type: (...) -> bool # Transactions are sampled independent of error events. is_transaction = event.get("type") == "transaction" if is_transaction: return True ignoring_prevents_recursion = scope is not None and not scope._should_capture if ignoring_prevents_recursion: return False ignored_by_config_option = self._is_ignored_error(event, hint) if ignored_by_config_option: return False return True def _should_sample_error( self, event, # type: Event hint, # type: Hint ): # type: (...) -> bool error_sampler = self.options.get("error_sampler", None) if callable(error_sampler): with capture_internal_exceptions(): sample_rate = error_sampler(event, hint) else: sample_rate = self.options["sample_rate"] try: not_in_sample_rate = sample_rate < 1.0 and random.random() >= sample_rate except NameError: logger.warning( "The provided error_sampler raised an error. Defaulting to sampling the event." ) # If the error_sampler raised an error, we should sample the event, since the default behavior # (when no sample_rate or error_sampler is provided) is to sample all events. not_in_sample_rate = False except TypeError: parameter, verb = ( ("error_sampler", "returned") if callable(error_sampler) else ("sample_rate", "contains") ) logger.warning( "The provided %s %s an invalid value of %s. The value should be a float or a bool. Defaulting to sampling the event." % (parameter, verb, repr(sample_rate)) ) # If the sample_rate has an invalid value, we should sample the event, since the default behavior # (when no sample_rate or error_sampler is provided) is to sample all events. not_in_sample_rate = False if not_in_sample_rate: # because we will not sample this event, record a "lost event". if self.transport: self.transport.record_lost_event("sample_rate", data_category="error") return False return True def _update_session_from_event( self, session, # type: Session event, # type: Event ): # type: (...) -> None crashed = False errored = False user_agent = None exceptions = (event.get("exception") or {}).get("values") if exceptions: errored = True for error in exceptions: mechanism = error.get("mechanism") if isinstance(mechanism, Mapping) and mechanism.get("handled") is False: crashed = True break user = event.get("user") if session.user_agent is None: headers = (event.get("request") or {}).get("headers") headers_dict = headers if isinstance(headers, dict) else {} for k, v in headers_dict.items(): if k.lower() == "user-agent": user_agent = v break session.update( status="crashed" if crashed else None, user=user, user_agent=user_agent, errors=session.errors + (errored or crashed), ) def capture_event( self, event, # type: Event hint=None, # type: Optional[Hint] scope=None, # type: Optional[Scope] ): # type: (...) 
-> Optional[str]
        """Captures an event.

        :param event: A ready-made event that can be directly sent to Sentry.

        :param hint: Contains metadata about the event that can be read from
            `before_send`, such as the original exception object or an HTTP request
            object.

        :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events.

        :returns: An event ID. May be `None` if there is no DSN set or if the
            SDK decided to discard the event for other reasons. In such situations
            setting `debug=True` on `init()` may help.
        """
        hint = dict(hint or ())  # type: Hint

        if not self._should_capture(event, hint, scope):
            return None

        profile = event.pop("profile", None)

        event_id = event.get("event_id")
        if event_id is None:
            event["event_id"] = event_id = uuid.uuid4().hex
        event_opt = self._prepare_event(event, hint, scope)
        if event_opt is None:
            return None

        # whenever we capture an event we also check if the session needs
        # to be updated based on that information.
        session = scope._session if scope else None
        if session:
            self._update_session_from_event(session, event)

        is_transaction = event_opt.get("type") == "transaction"
        is_checkin = event_opt.get("type") == "check_in"

        if (
            not is_transaction
            and not is_checkin
            and not self._should_sample_error(event, hint)
        ):
            return None

        attachments = hint.get("attachments")

        trace_context = event_opt.get("contexts", {}).get("trace") or {}
        dynamic_sampling_context = trace_context.pop("dynamic_sampling_context", {})

        headers = {
            "event_id": event_opt["event_id"],
            "sent_at": format_timestamp(datetime.now(timezone.utc)),
        }  # type: dict[str, object]

        if dynamic_sampling_context:
            headers["trace"] = dynamic_sampling_context

        envelope = Envelope(headers=headers)

        if is_transaction:
            if isinstance(profile, Profile):
                envelope.add_profile(profile.to_json(event_opt, self.options))
            envelope.add_transaction(event_opt)
        elif is_checkin:
            envelope.add_checkin(event_opt)
        else:
            envelope.add_event(event_opt)

        for attachment in attachments or ():
            envelope.add_item(attachment.to_envelope_item())

        return_value = None
        if self.spotlight:
            self.spotlight.capture_envelope(envelope)
            return_value = event_id

        if self.transport is not None:
            self.transport.capture_envelope(envelope)
            return_value = event_id

        return return_value

    def capture_session(
        self, session  # type: Session
    ):
        # type: (...) -> None
        if not session.release:
            logger.info("Discarded session update because of missing release")
        else:
            self.session_flusher.add_session(session)

    if TYPE_CHECKING:

        @overload
        def get_integration(self, name_or_class):
            # type: (str) -> Optional[Integration]
            ...

        @overload
        def get_integration(self, name_or_class):
            # type: (type[I]) -> Optional[I]
            ...

    def get_integration(
        self, name_or_class  # type: Union[str, Type[Integration]]
    ):
        # type: (...) -> Optional[Integration]
        """Returns the integration for this client by name or class.
        If the client does not have that integration then `None` is returned.
        """
        if isinstance(name_or_class, str):
            integration_name = name_or_class
        elif name_or_class.identifier is not None:
            integration_name = name_or_class.identifier
        else:
            raise ValueError("Integration has no name")

        return self.integrations.get(integration_name)

    def close(
        self,
        timeout=None,  # type: Optional[float]
        callback=None,  # type: Optional[Callable[[int, float], None]]
    ):
        # type: (...) -> None
        """
        Close the client and shut down the transport. Arguments have the same
        semantics as :py:meth:`Client.flush`.
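        A minimal shutdown sketch (``get_client`` returns the active client;
        the optional callback receives the number of pending events and the
        timeout):

        ```
        import sentry_sdk

        client = sentry_sdk.get_client()
        client.close(timeout=2.0)
        ```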
""" if self.transport is not None: self.flush(timeout=timeout, callback=callback) self.session_flusher.kill() if self.metrics_aggregator is not None: self.metrics_aggregator.kill() if self.monitor: self.monitor.kill() self.transport.kill() self.transport = None def flush( self, timeout=None, # type: Optional[float] callback=None, # type: Optional[Callable[[int, float], None]] ): # type: (...) -> None """ Wait for the current events to be sent. :param timeout: Wait for at most `timeout` seconds. If no `timeout` is provided, the `shutdown_timeout` option value is used. :param callback: Is invoked with the number of pending events and the configured timeout. """ if self.transport is not None: if timeout is None: timeout = self.options["shutdown_timeout"] self.session_flusher.flush() if self.metrics_aggregator is not None: self.metrics_aggregator.flush() self.transport.flush(timeout=timeout, callback=callback) def __enter__(self): # type: () -> _Client return self def __exit__(self, exc_type, exc_value, tb): # type: (Any, Any, Any) -> None self.close() from typing import TYPE_CHECKING if TYPE_CHECKING: # Make mypy, PyCharm and other static analyzers think `get_options` is a # type to have nicer autocompletion for params. # # Use `ClientConstructor` to define the argument types of `init` and # `Dict[str, Any]` to tell static analyzers about the return type. class get_options(ClientConstructor, Dict[str, Any]): # noqa: N801 pass class Client(ClientConstructor, _Client): pass else: # Alias `get_options` for actual usage. Go through the lambda indirection # to throw PyCharm off of the weakly typed signature (it would otherwise # discover both the weakly typed signature of `_init` and our faked `init` # type). get_options = (lambda: _get_options)() Client = (lambda: _Client)() sentry-python-2.18.0/sentry_sdk/consts.py000066400000000000000000000441651471214654000205330ustar00rootroot00000000000000import itertools from enum import Enum from typing import TYPE_CHECKING # up top to prevent circular import due to integration import DEFAULT_MAX_VALUE_LENGTH = 1024 # Also needs to be at the top to prevent circular import class EndpointType(Enum): """ The type of an endpoint. This is an enum, rather than a constant, for historical reasons (the old /store endpoint). The enum also preserve future compatibility, in case we ever have a new endpoint. """ ENVELOPE = "envelope" class CompressionAlgo(Enum): GZIP = "gzip" BROTLI = "br" if TYPE_CHECKING: import sentry_sdk from typing import Optional from typing import Callable from typing import Union from typing import List from typing import Type from typing import Dict from typing import Any from typing import Sequence from typing import Tuple from typing_extensions import TypedDict from sentry_sdk._types import ( BreadcrumbProcessor, ContinuousProfilerMode, Event, EventProcessor, Hint, MeasurementUnit, ProfilerMode, TracesSampler, TransactionProcessor, MetricTags, MetricValue, ) # Experiments are feature flags to enable and disable certain unstable SDK # functionality. Changing them from the defaults (`None`) in production # code is highly discouraged. They are not subject to any stability # guarantees such as the ones from semantic versioning. 
    Experiments = TypedDict(
        "Experiments",
        {
            "max_spans": Optional[int],
            "max_flags": Optional[int],
            "record_sql_params": Optional[bool],
            "continuous_profiling_auto_start": Optional[bool],
            "continuous_profiling_mode": Optional[ContinuousProfilerMode],
            "otel_powered_performance": Optional[bool],
            "transport_zlib_compression_level": Optional[int],
            "transport_compression_level": Optional[int],
            "transport_compression_algo": Optional[CompressionAlgo],
            "transport_num_pools": Optional[int],
            "transport_http2": Optional[bool],
            "enable_metrics": Optional[bool],
            "before_emit_metric": Optional[
                Callable[[str, MetricValue, MeasurementUnit, MetricTags], bool]
            ],
            "metric_code_locations": Optional[bool],
        },
        total=False,
    )

DEFAULT_QUEUE_SIZE = 100
DEFAULT_MAX_BREADCRUMBS = 100
MATCH_ALL = r".*"

FALSE_VALUES = [
    "false",
    "no",
    "off",
    "n",
    "0",
]


class INSTRUMENTER:
    SENTRY = "sentry"
    OTEL = "otel"


class SPANDATA:
    """
    Additional information describing the type of the span.
    See: https://develop.sentry.dev/sdk/performance/span-data-conventions/
    """

    AI_FREQUENCY_PENALTY = "ai.frequency_penalty"
    """
    Used to reduce repetitiveness of generated tokens.
    Example: 0.5
    """

    AI_PRESENCE_PENALTY = "ai.presence_penalty"
    """
    Used to reduce repetitiveness of generated tokens.
    Example: 0.5
    """

    AI_INPUT_MESSAGES = "ai.input_messages"
    """
    The input messages to an LLM call.
    Example: [{"role": "user", "message": "hello"}]
    """

    AI_MODEL_ID = "ai.model_id"
    """
    The unique descriptor of the model being executed.
    Example: gpt-4
    """

    AI_METADATA = "ai.metadata"
    """
    Extra metadata passed to an AI pipeline step.
    Example: {"executed_function": "add_integers"}
    """

    AI_TAGS = "ai.tags"
    """
    Tags that describe an AI pipeline step.
    Example: {"executed_function": "add_integers"}
    """

    AI_STREAMING = "ai.streaming"
    """
    Whether or not the AI model call's response was streamed back asynchronously.
    Example: true
    """

    AI_TEMPERATURE = "ai.temperature"
    """
    For an AI model call, the temperature parameter. Temperature essentially means how random the output will be.
    Example: 0.5
    """

    AI_TOP_P = "ai.top_p"
    """
    For an AI model call, the top_p parameter. Top_p essentially controls how random the output will be.
    Example: 0.5
    """

    AI_TOP_K = "ai.top_k"
    """
    For an AI model call, the top_k parameter. Top_k essentially controls how random the output will be.
    Example: 35
    """

    AI_FUNCTION_CALL = "ai.function_call"
    """
    For an AI model call, the function that was called. This is deprecated for OpenAI, and replaced by tool_calls
    """

    AI_TOOL_CALLS = "ai.tool_calls"
    """
    For an AI model call, the tool calls that were made (replaces the deprecated function_call for OpenAI)
    """

    AI_TOOLS = "ai.tools"
    """
    For an AI model call, the functions that are available
    """

    AI_RESPONSE_FORMAT = "ai.response_format"
    """
    For an AI model call, the format of the response
    """

    AI_LOGIT_BIAS = "ai.logit_bias"
    """
    For an AI model call, the logit bias
    """

    AI_PREAMBLE = "ai.preamble"
    """
    For an AI model call, the preamble parameter. Preambles are a part of the prompt used to adjust the model's overall behavior and conversation style.
    Example: "You are now a clown."
    """

    AI_RAW_PROMPTING = "ai.raw_prompting"
    """
    Minimize pre-processing done to the prompt sent to the LLM.
    Example: true
    """

    AI_RESPONSES = "ai.responses"
    """
    The responses to an AI model call. Always as a list.
    Example: ["hello", "world"]
    """

    AI_SEED = "ai.seed"
    """
    The seed, ideally models given the same seed and same other parameters will produce the exact same output.
Example: 123.45 """ DB_NAME = "db.name" """ The name of the database being accessed. For commands that switch the database, this should be set to the target database (even if the command fails). Example: myDatabase """ DB_USER = "db.user" """ The name of the database user used for connecting to the database. See: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/database.md Example: my_user """ DB_OPERATION = "db.operation" """ The name of the operation being executed, e.g. the MongoDB command name such as findAndModify, or the SQL keyword. See: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/database.md Example: findAndModify, HMSET, SELECT """ DB_SYSTEM = "db.system" """ An identifier for the database management system (DBMS) product being used. See: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/database.md Example: postgresql """ DB_MONGODB_COLLECTION = "db.mongodb.collection" """ The MongoDB collection being accessed within the database. See: https://github.com/open-telemetry/semantic-conventions/blob/main/docs/database/mongodb.md#attributes Example: public.users; customers """ CACHE_HIT = "cache.hit" """ A boolean indicating whether the requested data was found in the cache. Example: true """ CACHE_ITEM_SIZE = "cache.item_size" """ The size of the requested data in bytes. Example: 58 """ CACHE_KEY = "cache.key" """ The key of the requested data. Example: template.cache.some_item.867da7e2af8e6b2f3aa7213a4080edb3 """ NETWORK_PEER_ADDRESS = "network.peer.address" """ Peer address of the network connection - IP address or Unix domain socket name. Example: 10.1.2.80, /tmp/my.sock, localhost """ NETWORK_PEER_PORT = "network.peer.port" """ Peer port number of the network connection. Example: 6379 """ HTTP_QUERY = "http.query" """ The Query string present in the URL. Example: ?foo=bar&bar=baz """ HTTP_FRAGMENT = "http.fragment" """ The Fragments present in the URL. Example: #foo=bar """ HTTP_METHOD = "http.method" """ The HTTP method used. Example: GET """ HTTP_STATUS_CODE = "http.response.status_code" """ The HTTP status code as an integer. Example: 418 """ MESSAGING_DESTINATION_NAME = "messaging.destination.name" """ The destination name where the message is being consumed from, e.g. the queue name or topic. """ MESSAGING_MESSAGE_ID = "messaging.message.id" """ The message's identifier. """ MESSAGING_MESSAGE_RETRY_COUNT = "messaging.message.retry.count" """ Number of retries/attempts to process a message. """ MESSAGING_MESSAGE_RECEIVE_LATENCY = "messaging.message.receive.latency" """ The latency between when the task was enqueued and when it was started to be processed. """ MESSAGING_SYSTEM = "messaging.system" """ The messaging system's name, e.g. `kafka`, `aws_sqs` """ SERVER_ADDRESS = "server.address" """ Name of the database host. Example: example.com """ SERVER_PORT = "server.port" """ Logical server port number Example: 80; 8080; 443 """ SERVER_SOCKET_ADDRESS = "server.socket.address" """ Physical server IP address or Unix socket address. Example: 10.5.3.2 """ SERVER_SOCKET_PORT = "server.socket.port" """ Physical server port. Recommended: If different than server.port. Example: 16456 """ CODE_FILEPATH = "code.filepath" """ The source code file name that identifies the code unit as uniquely as possible (preferably an absolute file path). 
Example: "/app/myapplication/http/handler/server.py" """ CODE_LINENO = "code.lineno" """ The line number in `code.filepath` best representing the operation. It SHOULD point within the code unit named in `code.function`. Example: 42 """ CODE_FUNCTION = "code.function" """ The method or function name, or equivalent (usually rightmost part of the code unit's name). Example: "server_request" """ CODE_NAMESPACE = "code.namespace" """ The "namespace" within which `code.function` is defined. Usually the qualified class or module name, such that `code.namespace` + some separator + `code.function` form a unique identifier for the code unit. Example: "http.handler" """ THREAD_ID = "thread.id" """ Identifier of a thread from where the span originated. This should be a string. Example: "7972576320" """ THREAD_NAME = "thread.name" """ Label identifying a thread from where the span originated. This should be a string. Example: "MainThread" """ PROFILER_ID = "profiler_id" """ Label identifying the profiler id that the span occurred in. This should be a string. Example: "5249fbada8d5416482c2f6e47e337372" """ class SPANSTATUS: """ The status of a Sentry span. See: https://develop.sentry.dev/sdk/event-payloads/contexts/#trace-context """ ABORTED = "aborted" ALREADY_EXISTS = "already_exists" CANCELLED = "cancelled" DATA_LOSS = "data_loss" DEADLINE_EXCEEDED = "deadline_exceeded" FAILED_PRECONDITION = "failed_precondition" INTERNAL_ERROR = "internal_error" INVALID_ARGUMENT = "invalid_argument" NOT_FOUND = "not_found" OK = "ok" OUT_OF_RANGE = "out_of_range" PERMISSION_DENIED = "permission_denied" RESOURCE_EXHAUSTED = "resource_exhausted" UNAUTHENTICATED = "unauthenticated" UNAVAILABLE = "unavailable" UNIMPLEMENTED = "unimplemented" UNKNOWN_ERROR = "unknown_error" class OP: ANTHROPIC_MESSAGES_CREATE = "ai.messages.create.anthropic" CACHE_GET = "cache.get" CACHE_PUT = "cache.put" COHERE_CHAT_COMPLETIONS_CREATE = "ai.chat_completions.create.cohere" COHERE_EMBEDDINGS_CREATE = "ai.embeddings.create.cohere" DB = "db" DB_REDIS = "db.redis" EVENT_DJANGO = "event.django" FUNCTION = "function" FUNCTION_AWS = "function.aws" FUNCTION_GCP = "function.gcp" GRAPHQL_EXECUTE = "graphql.execute" GRAPHQL_MUTATION = "graphql.mutation" GRAPHQL_PARSE = "graphql.parse" GRAPHQL_RESOLVE = "graphql.resolve" GRAPHQL_SUBSCRIPTION = "graphql.subscription" GRAPHQL_QUERY = "graphql.query" GRAPHQL_VALIDATE = "graphql.validate" GRPC_CLIENT = "grpc.client" GRPC_SERVER = "grpc.server" HTTP_CLIENT = "http.client" HTTP_CLIENT_STREAM = "http.client.stream" HTTP_SERVER = "http.server" MIDDLEWARE_DJANGO = "middleware.django" MIDDLEWARE_LITESTAR = "middleware.litestar" MIDDLEWARE_LITESTAR_RECEIVE = "middleware.litestar.receive" MIDDLEWARE_LITESTAR_SEND = "middleware.litestar.send" MIDDLEWARE_STARLETTE = "middleware.starlette" MIDDLEWARE_STARLETTE_RECEIVE = "middleware.starlette.receive" MIDDLEWARE_STARLETTE_SEND = "middleware.starlette.send" MIDDLEWARE_STARLITE = "middleware.starlite" MIDDLEWARE_STARLITE_RECEIVE = "middleware.starlite.receive" MIDDLEWARE_STARLITE_SEND = "middleware.starlite.send" OPENAI_CHAT_COMPLETIONS_CREATE = "ai.chat_completions.create.openai" OPENAI_EMBEDDINGS_CREATE = "ai.embeddings.create.openai" HUGGINGFACE_HUB_CHAT_COMPLETIONS_CREATE = ( "ai.chat_completions.create.huggingface_hub" ) LANGCHAIN_PIPELINE = "ai.pipeline.langchain" LANGCHAIN_RUN = "ai.run.langchain" LANGCHAIN_TOOL = "ai.tool.langchain" LANGCHAIN_AGENT = "ai.agent.langchain" LANGCHAIN_CHAT_COMPLETIONS_CREATE = "ai.chat_completions.create.langchain" QUEUE_PROCESS = 
"queue.process" QUEUE_PUBLISH = "queue.publish" QUEUE_SUBMIT_ARQ = "queue.submit.arq" QUEUE_TASK_ARQ = "queue.task.arq" QUEUE_SUBMIT_CELERY = "queue.submit.celery" QUEUE_TASK_CELERY = "queue.task.celery" QUEUE_TASK_RQ = "queue.task.rq" QUEUE_SUBMIT_HUEY = "queue.submit.huey" QUEUE_TASK_HUEY = "queue.task.huey" QUEUE_SUBMIT_RAY = "queue.submit.ray" QUEUE_TASK_RAY = "queue.task.ray" SUBPROCESS = "subprocess" SUBPROCESS_WAIT = "subprocess.wait" SUBPROCESS_COMMUNICATE = "subprocess.communicate" TEMPLATE_RENDER = "template.render" VIEW_RENDER = "view.render" VIEW_RESPONSE_RENDER = "view.response.render" WEBSOCKET_SERVER = "websocket.server" SOCKET_CONNECTION = "socket.connection" SOCKET_DNS = "socket.dns" # This type exists to trick mypy and PyCharm into thinking `init` and `Client` # take these arguments (even though they take opaque **kwargs) class ClientConstructor: def __init__( self, dsn=None, # type: Optional[str] *, max_breadcrumbs=DEFAULT_MAX_BREADCRUMBS, # type: int release=None, # type: Optional[str] environment=None, # type: Optional[str] server_name=None, # type: Optional[str] shutdown_timeout=2, # type: float integrations=[], # type: Sequence[sentry_sdk.integrations.Integration] # noqa: B006 in_app_include=[], # type: List[str] # noqa: B006 in_app_exclude=[], # type: List[str] # noqa: B006 default_integrations=True, # type: bool dist=None, # type: Optional[str] transport=None, # type: Optional[Union[sentry_sdk.transport.Transport, Type[sentry_sdk.transport.Transport], Callable[[Event], None]]] transport_queue_size=DEFAULT_QUEUE_SIZE, # type: int sample_rate=1.0, # type: float send_default_pii=False, # type: bool http_proxy=None, # type: Optional[str] https_proxy=None, # type: Optional[str] ignore_errors=[], # type: Sequence[Union[type, str]] # noqa: B006 max_request_body_size="medium", # type: str socket_options=None, # type: Optional[List[Tuple[int, int, int | bytes]]] keep_alive=False, # type: bool before_send=None, # type: Optional[EventProcessor] before_breadcrumb=None, # type: Optional[BreadcrumbProcessor] debug=None, # type: Optional[bool] attach_stacktrace=False, # type: bool ca_certs=None, # type: Optional[str] propagate_traces=True, # type: bool traces_sample_rate=None, # type: Optional[float] traces_sampler=None, # type: Optional[TracesSampler] profiles_sample_rate=None, # type: Optional[float] profiles_sampler=None, # type: Optional[TracesSampler] profiler_mode=None, # type: Optional[ProfilerMode] auto_enabling_integrations=True, # type: bool disabled_integrations=None, # type: Optional[Sequence[sentry_sdk.integrations.Integration]] auto_session_tracking=True, # type: bool send_client_reports=True, # type: bool _experiments={}, # type: Experiments # noqa: B006 proxy_headers=None, # type: Optional[Dict[str, str]] instrumenter=INSTRUMENTER.SENTRY, # type: Optional[str] before_send_transaction=None, # type: Optional[TransactionProcessor] project_root=None, # type: Optional[str] enable_tracing=None, # type: Optional[bool] include_local_variables=True, # type: Optional[bool] include_source_context=True, # type: Optional[bool] trace_propagation_targets=[ # noqa: B006 MATCH_ALL ], # type: Optional[Sequence[str]] functions_to_trace=[], # type: Sequence[Dict[str, str]] # noqa: B006 event_scrubber=None, # type: Optional[sentry_sdk.scrubber.EventScrubber] max_value_length=DEFAULT_MAX_VALUE_LENGTH, # type: int enable_backpressure_handling=True, # type: bool error_sampler=None, # type: Optional[Callable[[Event, Hint], Union[float, bool]]] enable_db_query_source=True, # type: bool 
db_query_source_threshold_ms=100, # type: int spotlight=None, # type: Optional[Union[bool, str]] cert_file=None, # type: Optional[str] key_file=None, # type: Optional[str] custom_repr=None, # type: Optional[Callable[..., Optional[str]]] ): # type: (...) -> None pass def _get_default_options(): # type: () -> dict[str, Any] import inspect a = inspect.getfullargspec(ClientConstructor.__init__) defaults = a.defaults or () kwonlydefaults = a.kwonlydefaults or {} return dict( itertools.chain( zip(a.args[-len(defaults) :], defaults), kwonlydefaults.items(), ) ) DEFAULT_OPTIONS = _get_default_options() del _get_default_options VERSION = "2.18.0" sentry-python-2.18.0/sentry_sdk/crons/000077500000000000000000000000001471214654000177625ustar00rootroot00000000000000sentry-python-2.18.0/sentry_sdk/crons/__init__.py000066400000000000000000000003351471214654000220740ustar00rootroot00000000000000from sentry_sdk.crons.api import capture_checkin from sentry_sdk.crons.consts import MonitorStatus from sentry_sdk.crons.decorator import monitor __all__ = [ "capture_checkin", "MonitorStatus", "monitor", ] sentry-python-2.18.0/sentry_sdk/crons/api.py000066400000000000000000000030271471214654000211070ustar00rootroot00000000000000import uuid import sentry_sdk from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Optional from sentry_sdk._types import Event, MonitorConfig def _create_check_in_event( monitor_slug=None, # type: Optional[str] check_in_id=None, # type: Optional[str] status=None, # type: Optional[str] duration_s=None, # type: Optional[float] monitor_config=None, # type: Optional[MonitorConfig] ): # type: (...) -> Event options = sentry_sdk.get_client().options check_in_id = check_in_id or uuid.uuid4().hex # type: str check_in = { "type": "check_in", "monitor_slug": monitor_slug, "check_in_id": check_in_id, "status": status, "duration": duration_s, "environment": options.get("environment", None), "release": options.get("release", None), } # type: Event if monitor_config: check_in["monitor_config"] = monitor_config return check_in def capture_checkin( monitor_slug=None, # type: Optional[str] check_in_id=None, # type: Optional[str] status=None, # type: Optional[str] duration=None, # type: Optional[float] monitor_config=None, # type: Optional[MonitorConfig] ): # type: (...) -> str check_in_event = _create_check_in_event( monitor_slug=monitor_slug, check_in_id=check_in_id, status=status, duration_s=duration, monitor_config=monitor_config, ) sentry_sdk.capture_event(check_in_event) return check_in_event["check_in_id"] sentry-python-2.18.0/sentry_sdk/crons/consts.py000066400000000000000000000001271471214654000216450ustar00rootroot00000000000000class MonitorStatus: IN_PROGRESS = "in_progress" OK = "ok" ERROR = "error" sentry-python-2.18.0/sentry_sdk/crons/decorator.py000066400000000000000000000075111471214654000223220ustar00rootroot00000000000000from functools import wraps from inspect import iscoroutinefunction from sentry_sdk.crons import capture_checkin from sentry_sdk.crons.consts import MonitorStatus from sentry_sdk.utils import now from typing import TYPE_CHECKING if TYPE_CHECKING: from collections.abc import Awaitable, Callable from types import TracebackType from typing import ( Any, Optional, ParamSpec, Type, TypeVar, Union, cast, overload, ) from sentry_sdk._types import MonitorConfig P = ParamSpec("P") R = TypeVar("R") class monitor: # noqa: N801 """ Decorator/context manager to capture checkin events for a monitor. 
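    On entering the decorated function or ``with`` block an ``in_progress``
    check-in is sent; on exit an ``ok`` or ``error`` check-in with the measured
    duration follows.

    Usage (as decorator, with an explicit monitor config; a sketch where the
    ``schedule`` shape follows Sentry's crons monitor configuration):
    ```
    import sentry_sdk

    @sentry_sdk.monitor(
        monitor_slug='my-fancy-slug',
        monitor_config={"schedule": {"type": "crontab", "value": "0 * * * *"}},
    )
    def test(arg):
        print(arg)
    ```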
    Usage (as decorator):
    ```
    import sentry_sdk

    app = Celery()

    @app.task
    @sentry_sdk.monitor(monitor_slug='my-fancy-slug')
    def test(arg):
        print(arg)
    ```

    This does not have to be used with Celery, but if you do use it with celery,
    put the `@sentry_sdk.monitor` decorator below Celery's `@app.task` decorator.

    Usage (as context manager):
    ```
    import sentry_sdk

    def test(arg):
        with sentry_sdk.monitor(monitor_slug='my-fancy-slug'):
            print(arg)
    ```
    """

    def __init__(self, monitor_slug=None, monitor_config=None):
        # type: (Optional[str], Optional[MonitorConfig]) -> None
        self.monitor_slug = monitor_slug
        self.monitor_config = monitor_config

    def __enter__(self):
        # type: () -> None
        self.start_timestamp = now()
        self.check_in_id = capture_checkin(
            monitor_slug=self.monitor_slug,
            status=MonitorStatus.IN_PROGRESS,
            monitor_config=self.monitor_config,
        )

    def __exit__(self, exc_type, exc_value, traceback):
        # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]) -> None
        duration_s = now() - self.start_timestamp

        if exc_type is None and exc_value is None and traceback is None:
            status = MonitorStatus.OK
        else:
            status = MonitorStatus.ERROR

        capture_checkin(
            monitor_slug=self.monitor_slug,
            check_in_id=self.check_in_id,
            status=status,
            duration=duration_s,
            monitor_config=self.monitor_config,
        )

    if TYPE_CHECKING:

        @overload
        def __call__(self, fn):
            # type: (Callable[P, Awaitable[Any]]) -> Callable[P, Awaitable[Any]]
            # Unfortunately, mypy does not give us any reliable way to type check the
            # return value of an Awaitable (i.e. async function) for this overload,
            # since calling iscoroutinefunction narrows the type to Callable[P, Awaitable[Any]].
            ...

        @overload
        def __call__(self, fn):
            # type: (Callable[P, R]) -> Callable[P, R]
            ...

    def __call__(
        self,
        fn,  # type: Union[Callable[P, R], Callable[P, Awaitable[Any]]]
    ):
        # type: (...) -> Union[Callable[P, R], Callable[P, Awaitable[Any]]]
        if iscoroutinefunction(fn):
            return self._async_wrapper(fn)
        else:
            if TYPE_CHECKING:
                fn = cast("Callable[P, R]", fn)
            return self._sync_wrapper(fn)

    def _async_wrapper(self, fn):
        # type: (Callable[P, Awaitable[Any]]) -> Callable[P, Awaitable[Any]]
        @wraps(fn)
        async def inner(*args: "P.args", **kwargs: "P.kwargs"):
            # type: (...) -> R
            with self:
                return await fn(*args, **kwargs)

        return inner

    def _sync_wrapper(self, fn):
        # type: (Callable[P, R]) -> Callable[P, R]
        @wraps(fn)
        def inner(*args: "P.args", **kwargs: "P.kwargs"):
            # type: (...) -> R
            with self:
                return fn(*args, **kwargs)

        return inner
sentry-python-2.18.0/sentry_sdk/debug.py000066400000000000000000000017731471214654000203060ustar00rootroot00000000000000import sys
import logging
import warnings

from sentry_sdk import get_client
from sentry_sdk.client import _client_init_debug
from sentry_sdk.utils import logger
from logging import LogRecord


class _DebugFilter(logging.Filter):
    def filter(self, record):
        # type: (LogRecord) -> bool
        if _client_init_debug.get(False):
            return True

        return get_client().options["debug"]


def init_debug_support():
    # type: () -> None
    if not logger.handlers:
        configure_logger()


def configure_logger():
    # type: () -> None
    _handler = logging.StreamHandler(sys.stderr)
    _handler.setFormatter(logging.Formatter(" [sentry] %(levelname)s: %(message)s"))
    logger.addHandler(_handler)
    logger.setLevel(logging.DEBUG)
    logger.addFilter(_DebugFilter())


def configure_debug_hub():
    # type: () -> None
    warnings.warn(
        "configure_debug_hub is deprecated.
Please remove calls to it, as it is a no-op.", DeprecationWarning, stacklevel=2, ) sentry-python-2.18.0/sentry_sdk/envelope.py000066400000000000000000000237021471214654000210310ustar00rootroot00000000000000import io import json import mimetypes from sentry_sdk.session import Session from sentry_sdk.utils import json_dumps, capture_internal_exceptions from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Optional from typing import Union from typing import Dict from typing import List from typing import Iterator from sentry_sdk._types import Event, EventDataCategory def parse_json(data): # type: (Union[bytes, str]) -> Any # on some python 3 versions this needs to be bytes if isinstance(data, bytes): data = data.decode("utf-8", "replace") return json.loads(data) class Envelope: """ Represents a Sentry Envelope. The calling code is responsible for adhering to the constraints documented in the Sentry docs: https://develop.sentry.dev/sdk/envelopes/#data-model. In particular, each envelope may have at most one Item with type "event" or "transaction" (but not both). """ def __init__( self, headers=None, # type: Optional[Dict[str, Any]] items=None, # type: Optional[List[Item]] ): # type: (...) -> None if headers is not None: headers = dict(headers) self.headers = headers or {} if items is None: items = [] else: items = list(items) self.items = items @property def description(self): # type: (...) -> str return "envelope with %s items (%s)" % ( len(self.items), ", ".join(x.data_category for x in self.items), ) def add_event( self, event # type: Event ): # type: (...) -> None self.add_item(Item(payload=PayloadRef(json=event), type="event")) def add_transaction( self, transaction # type: Event ): # type: (...) -> None self.add_item(Item(payload=PayloadRef(json=transaction), type="transaction")) def add_profile( self, profile # type: Any ): # type: (...) -> None self.add_item(Item(payload=PayloadRef(json=profile), type="profile")) def add_profile_chunk( self, profile_chunk # type: Any ): # type: (...) -> None self.add_item( Item(payload=PayloadRef(json=profile_chunk), type="profile_chunk") ) def add_checkin( self, checkin # type: Any ): # type: (...) -> None self.add_item(Item(payload=PayloadRef(json=checkin), type="check_in")) def add_session( self, session # type: Union[Session, Any] ): # type: (...) -> None if isinstance(session, Session): session = session.to_json() self.add_item(Item(payload=PayloadRef(json=session), type="session")) def add_sessions( self, sessions # type: Any ): # type: (...) -> None self.add_item(Item(payload=PayloadRef(json=sessions), type="sessions")) def add_item( self, item # type: Item ): # type: (...) -> None self.items.append(item) def get_event(self): # type: (...) -> Optional[Event] for items in self.items: event = items.get_event() if event is not None: return event return None def get_transaction_event(self): # type: (...) -> Optional[Event] for item in self.items: event = item.get_transaction_event() if event is not None: return event return None def __iter__(self): # type: (...) -> Iterator[Item] return iter(self.items) def serialize_into( self, f # type: Any ): # type: (...) -> None f.write(json_dumps(self.headers)) f.write(b"\n") for item in self.items: item.serialize_into(f) def serialize(self): # type: (...) -> bytes out = io.BytesIO() self.serialize_into(out) return out.getvalue() @classmethod def deserialize_from( cls, f # type: Any ): # type: (...) 
-> Envelope
        headers = parse_json(f.readline())
        items = []
        while True:
            item = Item.deserialize_from(f)
            if item is None:
                break
            items.append(item)
        return cls(headers=headers, items=items)

    @classmethod
    def deserialize(
        cls, bytes  # type: bytes
    ):
        # type: (...) -> Envelope
        return cls.deserialize_from(io.BytesIO(bytes))

    def __repr__(self):
        # type: (...) -> str
        return "<Envelope headers=%r items=%r>" % (self.headers, self.items)


class PayloadRef:
    def __init__(
        self,
        bytes=None,  # type: Optional[bytes]
        path=None,  # type: Optional[Union[bytes, str]]
        json=None,  # type: Optional[Any]
    ):
        # type: (...) -> None
        self.json = json
        self.bytes = bytes
        self.path = path

    def get_bytes(self):
        # type: (...) -> bytes
        if self.bytes is None:
            if self.path is not None:
                with capture_internal_exceptions():
                    with open(self.path, "rb") as f:
                        self.bytes = f.read()
            elif self.json is not None:
                self.bytes = json_dumps(self.json)
        return self.bytes or b""

    @property
    def inferred_content_type(self):
        # type: (...) -> str
        if self.json is not None:
            return "application/json"
        elif self.path is not None:
            path = self.path
            if isinstance(path, bytes):
                path = path.decode("utf-8", "replace")
            ty = mimetypes.guess_type(path)[0]
            if ty:
                return ty
        return "application/octet-stream"

    def __repr__(self):
        # type: (...) -> str
        return "<Payload %r>" % (self.inferred_content_type,)


class Item:
    def __init__(
        self,
        payload,  # type: Union[bytes, str, PayloadRef]
        headers=None,  # type: Optional[Dict[str, Any]]
        type=None,  # type: Optional[str]
        content_type=None,  # type: Optional[str]
        filename=None,  # type: Optional[str]
    ):
        if headers is not None:
            headers = dict(headers)
        else:
            headers = {}
        self.headers = headers
        if isinstance(payload, bytes):
            payload = PayloadRef(bytes=payload)
        elif isinstance(payload, str):
            payload = PayloadRef(bytes=payload.encode("utf-8"))

        if filename is not None:
            headers["filename"] = filename
        if type is not None:
            headers["type"] = type
        if content_type is not None:
            headers["content_type"] = content_type
        elif "content_type" not in headers:
            headers["content_type"] = payload.inferred_content_type

        self.payload = payload

    def __repr__(self):
        # type: (...) -> str
        return "<Item headers=%r payload=%r data_category=%r>" % (
            self.headers,
            self.payload,
            self.data_category,
        )

    @property
    def type(self):
        # type: (...) -> Optional[str]
        return self.headers.get("type")

    @property
    def data_category(self):
        # type: (...) -> EventDataCategory
        ty = self.headers.get("type")
        if ty == "session" or ty == "sessions":
            return "session"
        elif ty == "attachment":
            return "attachment"
        elif ty == "transaction":
            return "transaction"
        elif ty == "event":
            return "error"
        elif ty == "client_report":
            return "internal"
        elif ty == "profile":
            return "profile"
        elif ty == "profile_chunk":
            return "profile_chunk"
        elif ty == "statsd":
            return "metric_bucket"
        elif ty == "check_in":
            return "monitor"
        else:
            return "default"

    def get_bytes(self):
        # type: (...) -> bytes
        return self.payload.get_bytes()

    def get_event(self):
        # type: (...) -> Optional[Event]
        """
        Returns an error event if there is one.
        """
        if self.type == "event" and self.payload.json is not None:
            return self.payload.json
        return None

    def get_transaction_event(self):
        # type: (...) -> Optional[Event]
        if self.type == "transaction" and self.payload.json is not None:
            return self.payload.json
        return None

    def serialize_into(
        self, f  # type: Any
    ):
        # type: (...) -> None
        headers = dict(self.headers)
        bytes = self.get_bytes()
        headers["length"] = len(bytes)
        f.write(json_dumps(headers))
        f.write(b"\n")
        f.write(bytes)
        f.write(b"\n")

    def serialize(self):
        # type: (...)
-> bytes out = io.BytesIO() self.serialize_into(out) return out.getvalue() @classmethod def deserialize_from( cls, f # type: Any ): # type: (...) -> Optional[Item] line = f.readline().rstrip() if not line: return None headers = parse_json(line) length = headers.get("length") if length is not None: payload = f.read(length) f.readline() else: # if no length was specified we need to read up to the end of line # and remove it (if it is present, i.e. not the very last char in an eof terminated envelope) payload = f.readline().rstrip(b"\n") if headers.get("type") in ("event", "transaction", "metric_buckets"): rv = cls(headers=headers, payload=PayloadRef(json=parse_json(payload))) else: rv = cls(headers=headers, payload=payload) return rv @classmethod def deserialize( cls, bytes # type: bytes ): # type: (...) -> Optional[Item] return cls.deserialize_from(io.BytesIO(bytes)) sentry-python-2.18.0/sentry_sdk/flag_utils.py000066400000000000000000000023101471214654000213350ustar00rootroot00000000000000from copy import copy from typing import TYPE_CHECKING import sentry_sdk from sentry_sdk._lru_cache import LRUCache if TYPE_CHECKING: from typing import TypedDict, Optional from sentry_sdk._types import Event, ExcInfo FlagData = TypedDict("FlagData", {"flag": str, "result": bool}) DEFAULT_FLAG_CAPACITY = 100 class FlagBuffer: def __init__(self, capacity): # type: (int) -> None self.buffer = LRUCache(capacity) self.capacity = capacity def clear(self): # type: () -> None self.buffer = LRUCache(self.capacity) def __copy__(self): # type: () -> FlagBuffer buffer = FlagBuffer(capacity=self.capacity) buffer.buffer = copy(self.buffer) return buffer def get(self): # type: () -> list[FlagData] return [{"flag": key, "result": value} for key, value in self.buffer.get_all()] def set(self, flag, result): # type: (str, bool) -> None self.buffer.set(flag, result) def flag_error_processor(event, exc_info): # type: (Event, ExcInfo) -> Optional[Event] scope = sentry_sdk.get_current_scope() event["contexts"]["flags"] = {"values": scope.flags.get()} return event sentry-python-2.18.0/sentry_sdk/hub.py000066400000000000000000000621131471214654000177710ustar00rootroot00000000000000import warnings from contextlib import contextmanager from sentry_sdk import ( get_client, get_global_scope, get_isolation_scope, get_current_scope, ) from sentry_sdk._compat import with_metaclass from sentry_sdk.consts import INSTRUMENTER from sentry_sdk.scope import _ScopeManager from sentry_sdk.client import Client from sentry_sdk.tracing import ( NoOpSpan, Span, Transaction, ) from sentry_sdk.utils import ( logger, ContextVar, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Callable from typing import ContextManager from typing import Dict from typing import Generator from typing import List from typing import Optional from typing import overload from typing import Tuple from typing import Type from typing import TypeVar from typing import Union from typing_extensions import Unpack from sentry_sdk.scope import Scope from sentry_sdk.client import BaseClient from sentry_sdk.integrations import Integration from sentry_sdk._types import ( Event, Hint, Breadcrumb, BreadcrumbHint, ExcInfo, LogLevelStr, SamplingContext, ) from sentry_sdk.tracing import TransactionKwargs T = TypeVar("T") else: def overload(x): # type: (T) -> T return x class SentryHubDeprecationWarning(DeprecationWarning): """ A custom deprecation warning to inform users that the Hub is deprecated. 
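    A migration sketch (both calls are equivalent; the top-level function is
    the supported API):

    ```
    # deprecated
    from sentry_sdk import Hub
    Hub.current.capture_message("hello")

    # preferred
    import sentry_sdk
    sentry_sdk.capture_message("hello")
    ```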
""" _MESSAGE = ( "`sentry_sdk.Hub` is deprecated and will be removed in a future major release. " "Please consult our 1.x to 2.x migration guide for details on how to migrate " "`Hub` usage to the new API: " "https://docs.sentry.io/platforms/python/migration/1.x-to-2.x" ) def __init__(self, *_): # type: (*object) -> None super().__init__(self._MESSAGE) @contextmanager def _suppress_hub_deprecation_warning(): # type: () -> Generator[None, None, None] """Utility function to suppress deprecation warnings for the Hub.""" with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=SentryHubDeprecationWarning) yield _local = ContextVar("sentry_current_hub") class HubMeta(type): @property def current(cls): # type: () -> Hub """Returns the current instance of the hub.""" warnings.warn(SentryHubDeprecationWarning(), stacklevel=2) rv = _local.get(None) if rv is None: with _suppress_hub_deprecation_warning(): # This will raise a deprecation warning; suppress it since we already warned above. rv = Hub(GLOBAL_HUB) _local.set(rv) return rv @property def main(cls): # type: () -> Hub """Returns the main instance of the hub.""" warnings.warn(SentryHubDeprecationWarning(), stacklevel=2) return GLOBAL_HUB class Hub(with_metaclass(HubMeta)): # type: ignore """ .. deprecated:: 2.0.0 The Hub is deprecated. Its functionality will be merged into :py:class:`sentry_sdk.scope.Scope`. The hub wraps the concurrency management of the SDK. Each thread has its own hub but the hub might transfer with the flow of execution if context vars are available. If the hub is used with a with statement it's temporarily activated. """ _stack = None # type: List[Tuple[Optional[Client], Scope]] _scope = None # type: Optional[Scope] # Mypy doesn't pick up on the metaclass. if TYPE_CHECKING: current = None # type: Hub main = None # type: Hub def __init__( self, client_or_hub=None, # type: Optional[Union[Hub, Client]] scope=None, # type: Optional[Any] ): # type: (...) -> None warnings.warn(SentryHubDeprecationWarning(), stacklevel=2) current_scope = None if isinstance(client_or_hub, Hub): client = get_client() if scope is None: # hub cloning is going on, we use a fork of the current/isolation scope for context manager scope = get_isolation_scope().fork() current_scope = get_current_scope().fork() else: client = client_or_hub # type: ignore get_global_scope().set_client(client) if scope is None: # so there is no Hub cloning going on # just the current isolation scope is used for context manager scope = get_isolation_scope() current_scope = get_current_scope() if current_scope is None: # just the current current scope is used for context manager current_scope = get_current_scope() self._stack = [(client, scope)] # type: ignore self._last_event_id = None # type: Optional[str] self._old_hubs = [] # type: List[Hub] self._old_current_scopes = [] # type: List[Scope] self._old_isolation_scopes = [] # type: List[Scope] self._current_scope = current_scope # type: Scope self._scope = scope # type: Scope def __enter__(self): # type: () -> Hub self._old_hubs.append(Hub.current) _local.set(self) current_scope = get_current_scope() self._old_current_scopes.append(current_scope) scope._current_scope.set(self._current_scope) isolation_scope = get_isolation_scope() self._old_isolation_scopes.append(isolation_scope) scope._isolation_scope.set(self._scope) return self def __exit__( self, exc_type, # type: Optional[type] exc_value, # type: Optional[BaseException] tb, # type: Optional[Any] ): # type: (...) 
-> None old = self._old_hubs.pop() _local.set(old) old_current_scope = self._old_current_scopes.pop() scope._current_scope.set(old_current_scope) old_isolation_scope = self._old_isolation_scopes.pop() scope._isolation_scope.set(old_isolation_scope) def run( self, callback # type: Callable[[], T] ): # type: (...) -> T """ .. deprecated:: 2.0.0 This function is deprecated and will be removed in a future release. Runs a callback in the context of the hub. Alternatively the with statement can be used on the hub directly. """ with self: return callback() def get_integration( self, name_or_class # type: Union[str, Type[Integration]] ): # type: (...) -> Any """ .. deprecated:: 2.0.0 This function is deprecated and will be removed in a future release. Please use :py:meth:`sentry_sdk.client._Client.get_integration` instead. Returns the integration for this hub by name or class. If there is no client bound or the client does not have that integration then `None` is returned. If the return value is not `None` the hub is guaranteed to have a client attached. """ return get_client().get_integration(name_or_class) @property def client(self): # type: () -> Optional[BaseClient] """ .. deprecated:: 2.0.0 This property is deprecated and will be removed in a future release. Please use :py:func:`sentry_sdk.api.get_client` instead. Returns the current client on the hub. """ client = get_client() if not client.is_active(): return None return client @property def scope(self): # type: () -> Scope """ .. deprecated:: 2.0.0 This property is deprecated and will be removed in a future release. Returns the current scope on the hub. """ return get_isolation_scope() def last_event_id(self): # type: () -> Optional[str] """ Returns the last event ID. .. deprecated:: 1.40.5 This function is deprecated and will be removed in a future release. The functions `capture_event`, `capture_message`, and `capture_exception` return the event ID directly. """ logger.warning( "Deprecated: last_event_id is deprecated. This will be removed in the future. The functions `capture_event`, `capture_message`, and `capture_exception` return the event ID directly." ) return self._last_event_id def bind_client( self, new # type: Optional[BaseClient] ): # type: (...) -> None """ .. deprecated:: 2.0.0 This function is deprecated and will be removed in a future release. Please use :py:meth:`sentry_sdk.Scope.set_client` instead. Binds a new client to the hub. """ get_global_scope().set_client(new) def capture_event(self, event, hint=None, scope=None, **scope_kwargs): # type: (Event, Optional[Hint], Optional[Scope], Any) -> Optional[str] """ .. deprecated:: 2.0.0 This function is deprecated and will be removed in a future release. Please use :py:meth:`sentry_sdk.Scope.capture_event` instead. Captures an event. Alias of :py:meth:`sentry_sdk.Scope.capture_event`. :param event: A ready-made event that can be directly sent to Sentry. :param hint: Contains metadata about the event that can be read from `before_send`, such as the original exception object or a HTTP request object. :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events. The `scope` and `scope_kwargs` parameters are mutually exclusive. :param scope_kwargs: Optional data to apply to event. For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`. The `scope` and `scope_kwargs` parameters are mutually exclusive. 
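        A minimal sketch (the event dict follows the Sentry event protocol;
        ``message`` and ``level`` are standard keys):

        ```
        event_id = Hub.current.capture_event(
            {"message": "something happened", "level": "warning"}
        )
        ```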
""" last_event_id = get_current_scope().capture_event( event, hint, scope=scope, **scope_kwargs ) is_transaction = event.get("type") == "transaction" if last_event_id is not None and not is_transaction: self._last_event_id = last_event_id return last_event_id def capture_message(self, message, level=None, scope=None, **scope_kwargs): # type: (str, Optional[LogLevelStr], Optional[Scope], Any) -> Optional[str] """ .. deprecated:: 2.0.0 This function is deprecated and will be removed in a future release. Please use :py:meth:`sentry_sdk.Scope.capture_message` instead. Captures a message. Alias of :py:meth:`sentry_sdk.Scope.capture_message`. :param message: The string to send as the message to Sentry. :param level: If no level is provided, the default level is `info`. :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events. The `scope` and `scope_kwargs` parameters are mutually exclusive. :param scope_kwargs: Optional data to apply to event. For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`. The `scope` and `scope_kwargs` parameters are mutually exclusive. :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`). """ last_event_id = get_current_scope().capture_message( message, level=level, scope=scope, **scope_kwargs ) if last_event_id is not None: self._last_event_id = last_event_id return last_event_id def capture_exception(self, error=None, scope=None, **scope_kwargs): # type: (Optional[Union[BaseException, ExcInfo]], Optional[Scope], Any) -> Optional[str] """ .. deprecated:: 2.0.0 This function is deprecated and will be removed in a future release. Please use :py:meth:`sentry_sdk.Scope.capture_exception` instead. Captures an exception. Alias of :py:meth:`sentry_sdk.Scope.capture_exception`. :param error: An exception to capture. If `None`, `sys.exc_info()` will be used. :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events. The `scope` and `scope_kwargs` parameters are mutually exclusive. :param scope_kwargs: Optional data to apply to event. For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`. The `scope` and `scope_kwargs` parameters are mutually exclusive. :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`). """ last_event_id = get_current_scope().capture_exception( error, scope=scope, **scope_kwargs ) if last_event_id is not None: self._last_event_id = last_event_id return last_event_id def add_breadcrumb(self, crumb=None, hint=None, **kwargs): # type: (Optional[Breadcrumb], Optional[BreadcrumbHint], Any) -> None """ .. deprecated:: 2.0.0 This function is deprecated and will be removed in a future release. Please use :py:meth:`sentry_sdk.Scope.add_breadcrumb` instead. Adds a breadcrumb. :param crumb: Dictionary with the data as the sentry v7/v8 protocol expects. :param hint: An optional value that can be used by `before_breadcrumb` to customize the breadcrumbs that are emitted. """ get_isolation_scope().add_breadcrumb(crumb, hint, **kwargs) def start_span(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): # type: (str, Any) -> Span """ .. deprecated:: 2.0.0 This function is deprecated and will be removed in a future release. Please use :py:meth:`sentry_sdk.Scope.start_span` instead. Start a span whose parent is the currently active span or transaction, if any. 
The return value is a :py:class:`sentry_sdk.tracing.Span` instance, typically used as a context manager to start and stop timing in a `with` block. Only spans contained in a transaction are sent to Sentry. Most integrations start a transaction at the appropriate time, for example for every incoming HTTP request. Use :py:meth:`sentry_sdk.start_transaction` to start a new transaction when one is not already in progress. For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Span`. """ scope = get_current_scope() return scope.start_span(instrumenter=instrumenter, **kwargs) def start_transaction( self, transaction=None, instrumenter=INSTRUMENTER.SENTRY, custom_sampling_context=None, **kwargs ): # type: (Optional[Transaction], str, Optional[SamplingContext], Unpack[TransactionKwargs]) -> Union[Transaction, NoOpSpan] """ .. deprecated:: 2.0.0 This function is deprecated and will be removed in a future release. Please use :py:meth:`sentry_sdk.Scope.start_transaction` instead. Start and return a transaction. Start an existing transaction if given, otherwise create and start a new transaction with kwargs. This is the entry point to manual tracing instrumentation. A tree structure can be built by adding child spans to the transaction, and child spans to other spans. To start a new child span within the transaction or any span, call the respective `.start_child()` method. Every child span must be finished before the transaction is finished, otherwise the unfinished spans are discarded. When used as context managers, spans and transactions are automatically finished at the end of the `with` block. If not using context managers, call the `.finish()` method. When the transaction is finished, it will be sent to Sentry with all its finished child spans. For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Transaction`. """ scope = get_current_scope() # For backwards compatibility, we allow passing the scope as the hub. # We need a major release to make this nice. (if someone searches the code: deprecated) # Type checking disabled for this line because deprecated keys are not allowed in the type signature. kwargs["hub"] = scope # type: ignore return scope.start_transaction( transaction, instrumenter, custom_sampling_context, **kwargs ) def continue_trace(self, environ_or_headers, op=None, name=None, source=None): # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str]) -> Transaction """ .. deprecated:: 2.0.0 This function is deprecated and will be removed in a future release. Please use :py:meth:`sentry_sdk.Scope.continue_trace` instead. Sets the propagation context from environment or headers and returns a transaction. """ return get_isolation_scope().continue_trace( environ_or_headers=environ_or_headers, op=op, name=name, source=source ) @overload def push_scope( self, callback=None # type: Optional[None] ): # type: (...) -> ContextManager[Scope] pass @overload def push_scope( # noqa: F811 self, callback # type: Callable[[Scope], None] ): # type: (...) -> None pass def push_scope( # noqa self, callback=None, # type: Optional[Callable[[Scope], None]] continue_trace=True, # type: bool ): # type: (...) -> Optional[ContextManager[Scope]] """ .. deprecated:: 2.0.0 This function is deprecated and will be removed in a future release. Pushes a new layer on the scope stack. :param callback: If provided, this method pushes a scope, calls `callback`, and pops the scope again. :returns: If no `callback` is provided, a context manager that should be used to pop the scope again. 
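        A usage sketch covering both forms (``set_tag`` is a standard
        :py:class:`sentry_sdk.Scope` method):

        ```
        with Hub.current.push_scope() as scope:
            scope.set_tag("my-tag", "my value")

        # or, equivalently, with a callback:
        Hub.current.push_scope(lambda scope: scope.set_tag("my-tag", "my value"))
        ```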
""" if callback is not None: with self.push_scope() as scope: callback(scope) return None return _ScopeManager(self) def pop_scope_unsafe(self): # type: () -> Tuple[Optional[Client], Scope] """ .. deprecated:: 2.0.0 This function is deprecated and will be removed in a future release. Pops a scope layer from the stack. Try to use the context manager :py:meth:`push_scope` instead. """ rv = self._stack.pop() assert self._stack, "stack must have at least one layer" return rv @overload def configure_scope( self, callback=None # type: Optional[None] ): # type: (...) -> ContextManager[Scope] pass @overload def configure_scope( # noqa: F811 self, callback # type: Callable[[Scope], None] ): # type: (...) -> None pass def configure_scope( # noqa self, callback=None, # type: Optional[Callable[[Scope], None]] continue_trace=True, # type: bool ): # type: (...) -> Optional[ContextManager[Scope]] """ .. deprecated:: 2.0.0 This function is deprecated and will be removed in a future release. Reconfigures the scope. :param callback: If provided, call the callback with the current scope. :returns: If no callback is provided, returns a context manager that returns the scope. """ scope = get_isolation_scope() if continue_trace: scope.generate_propagation_context() if callback is not None: # TODO: used to return None when client is None. Check if this changes behavior. callback(scope) return None @contextmanager def inner(): # type: () -> Generator[Scope, None, None] yield scope return inner() def start_session( self, session_mode="application" # type: str ): # type: (...) -> None """ .. deprecated:: 2.0.0 This function is deprecated and will be removed in a future release. Please use :py:meth:`sentry_sdk.Scope.start_session` instead. Starts a new session. """ get_isolation_scope().start_session( session_mode=session_mode, ) def end_session(self): # type: (...) -> None """ .. deprecated:: 2.0.0 This function is deprecated and will be removed in a future release. Please use :py:meth:`sentry_sdk.Scope.end_session` instead. Ends the current session if there is one. """ get_isolation_scope().end_session() def stop_auto_session_tracking(self): # type: (...) -> None """ .. deprecated:: 2.0.0 This function is deprecated and will be removed in a future release. Please use :py:meth:`sentry_sdk.Scope.stop_auto_session_tracking` instead. Stops automatic session tracking. This temporarily session tracking for the current scope when called. To resume session tracking call `resume_auto_session_tracking`. """ get_isolation_scope().stop_auto_session_tracking() def resume_auto_session_tracking(self): # type: (...) -> None """ .. deprecated:: 2.0.0 This function is deprecated and will be removed in a future release. Please use :py:meth:`sentry_sdk.Scope.resume_auto_session_tracking` instead. Resumes automatic session tracking for the current scope if disabled earlier. This requires that generally automatic session tracking is enabled. """ get_isolation_scope().resume_auto_session_tracking() def flush( self, timeout=None, # type: Optional[float] callback=None, # type: Optional[Callable[[int, float], None]] ): # type: (...) -> None """ .. deprecated:: 2.0.0 This function is deprecated and will be removed in a future release. Please use :py:meth:`sentry_sdk.client._Client.flush` instead. Alias for :py:meth:`sentry_sdk.client._Client.flush` """ return get_client().flush(timeout=timeout, callback=callback) def get_traceparent(self): # type: () -> Optional[str] """ .. 
deprecated:: 2.0.0 This function is deprecated and will be removed in a future release. Please use :py:meth:`sentry_sdk.Scope.get_traceparent` instead. Returns the traceparent either from the active span or from the scope. """ current_scope = get_current_scope() traceparent = current_scope.get_traceparent() if traceparent is None: isolation_scope = get_isolation_scope() traceparent = isolation_scope.get_traceparent() return traceparent def get_baggage(self): # type: () -> Optional[str] """ .. deprecated:: 2.0.0 This function is deprecated and will be removed in a future release. Please use :py:meth:`sentry_sdk.Scope.get_baggage` instead. Returns Baggage either from the active span or from the scope. """ current_scope = get_current_scope() baggage = current_scope.get_baggage() if baggage is None: isolation_scope = get_isolation_scope() baggage = isolation_scope.get_baggage() if baggage is not None: return baggage.serialize() return None def iter_trace_propagation_headers(self, span=None): # type: (Optional[Span]) -> Generator[Tuple[str, str], None, None] """ .. deprecated:: 2.0.0 This function is deprecated and will be removed in a future release. Please use :py:meth:`sentry_sdk.Scope.iter_trace_propagation_headers` instead. Return HTTP headers which allow propagation of trace data. Data taken from the span representing the request, if available, or the current span on the scope if not. """ return get_current_scope().iter_trace_propagation_headers( span=span, ) def trace_propagation_meta(self, span=None): # type: (Optional[Span]) -> str """ .. deprecated:: 2.0.0 This function is deprecated and will be removed in a future release. Please use :py:meth:`sentry_sdk.Scope.trace_propagation_meta` instead. Return meta tags which should be injected into HTML templates to allow propagation of trace information. """ if span is not None: logger.warning( "The parameter `span` in trace_propagation_meta() is deprecated and will be removed in the future." ) return get_current_scope().trace_propagation_meta( span=span, ) with _suppress_hub_deprecation_warning(): # Suppress deprecation warning for the Hub here, since we still always # import this module. GLOBAL_HUB = Hub() _local.set(GLOBAL_HUB) # Circular imports from sentry_sdk import scope sentry-python-2.18.0/sentry_sdk/integrations/000077500000000000000000000000001471214654000213445ustar00rootroot00000000000000sentry-python-2.18.0/sentry_sdk/integrations/__init__.py000066400000000000000000000206141471214654000234600ustar00rootroot00000000000000from abc import ABC, abstractmethod from threading import Lock from sentry_sdk.utils import logger from typing import TYPE_CHECKING if TYPE_CHECKING: from collections.abc import Sequence from typing import Callable from typing import Dict from typing import Iterator from typing import List from typing import Optional from typing import Set from typing import Type from typing import Union _DEFAULT_FAILED_REQUEST_STATUS_CODES = frozenset(range(500, 600)) _installer_lock = Lock() # Set of all integration identifiers we have attempted to install _processed_integrations = set() # type: Set[str] # Set of all integration identifiers we have actually installed _installed_integrations = set() # type: Set[str] def _generate_default_integrations_iterator( integrations, # type: List[str] auto_enabling_integrations, # type: List[str] ): # type: (...) 
-> Callable[[bool], Iterator[Type[Integration]]] def iter_default_integrations(with_auto_enabling_integrations): # type: (bool) -> Iterator[Type[Integration]] """Returns an iterator of the default integration classes:""" from importlib import import_module if with_auto_enabling_integrations: all_import_strings = integrations + auto_enabling_integrations else: all_import_strings = integrations for import_string in all_import_strings: try: module, cls = import_string.rsplit(".", 1) yield getattr(import_module(module), cls) except (DidNotEnable, SyntaxError) as e: logger.debug( "Did not import default integration %s: %s", import_string, e ) if isinstance(iter_default_integrations.__doc__, str): for import_string in integrations: iter_default_integrations.__doc__ += "\n- `{}`".format(import_string) return iter_default_integrations _DEFAULT_INTEGRATIONS = [ # stdlib/base runtime integrations "sentry_sdk.integrations.argv.ArgvIntegration", "sentry_sdk.integrations.atexit.AtexitIntegration", "sentry_sdk.integrations.dedupe.DedupeIntegration", "sentry_sdk.integrations.excepthook.ExcepthookIntegration", "sentry_sdk.integrations.logging.LoggingIntegration", "sentry_sdk.integrations.modules.ModulesIntegration", "sentry_sdk.integrations.stdlib.StdlibIntegration", "sentry_sdk.integrations.threading.ThreadingIntegration", ] _AUTO_ENABLING_INTEGRATIONS = [ "sentry_sdk.integrations.aiohttp.AioHttpIntegration", "sentry_sdk.integrations.anthropic.AnthropicIntegration", "sentry_sdk.integrations.ariadne.AriadneIntegration", "sentry_sdk.integrations.arq.ArqIntegration", "sentry_sdk.integrations.asyncpg.AsyncPGIntegration", "sentry_sdk.integrations.boto3.Boto3Integration", "sentry_sdk.integrations.bottle.BottleIntegration", "sentry_sdk.integrations.celery.CeleryIntegration", "sentry_sdk.integrations.chalice.ChaliceIntegration", "sentry_sdk.integrations.clickhouse_driver.ClickhouseDriverIntegration", "sentry_sdk.integrations.cohere.CohereIntegration", "sentry_sdk.integrations.django.DjangoIntegration", "sentry_sdk.integrations.falcon.FalconIntegration", "sentry_sdk.integrations.fastapi.FastApiIntegration", "sentry_sdk.integrations.flask.FlaskIntegration", "sentry_sdk.integrations.gql.GQLIntegration", "sentry_sdk.integrations.graphene.GrapheneIntegration", "sentry_sdk.integrations.httpx.HttpxIntegration", "sentry_sdk.integrations.huey.HueyIntegration", "sentry_sdk.integrations.huggingface_hub.HuggingfaceHubIntegration", "sentry_sdk.integrations.langchain.LangchainIntegration", "sentry_sdk.integrations.loguru.LoguruIntegration", "sentry_sdk.integrations.openai.OpenAIIntegration", "sentry_sdk.integrations.pymongo.PyMongoIntegration", "sentry_sdk.integrations.pyramid.PyramidIntegration", "sentry_sdk.integrations.quart.QuartIntegration", "sentry_sdk.integrations.redis.RedisIntegration", "sentry_sdk.integrations.rq.RqIntegration", "sentry_sdk.integrations.sanic.SanicIntegration", "sentry_sdk.integrations.sqlalchemy.SqlalchemyIntegration", "sentry_sdk.integrations.starlette.StarletteIntegration", "sentry_sdk.integrations.starlite.StarliteIntegration", "sentry_sdk.integrations.strawberry.StrawberryIntegration", "sentry_sdk.integrations.tornado.TornadoIntegration", ] iter_default_integrations = _generate_default_integrations_iterator( integrations=_DEFAULT_INTEGRATIONS, auto_enabling_integrations=_AUTO_ENABLING_INTEGRATIONS, ) del _generate_default_integrations_iterator def setup_integrations( integrations, with_defaults=True, with_auto_enabling_integrations=False, disabled_integrations=None, ): # type: 
(Sequence[Integration], bool, bool, Optional[Sequence[Union[type[Integration], Integration]]]) -> Dict[str, Integration] """ Given a list of integration instances, this installs them all. When `with_defaults` is set to `True` all default integrations are added unless they were already provided before. `disabled_integrations` takes precedence over `with_defaults` and `with_auto_enabling_integrations`. """ integrations = dict( (integration.identifier, integration) for integration in integrations or () ) logger.debug("Setting up integrations (with default = %s)", with_defaults) # Integrations that will not be enabled disabled_integrations = [ integration if isinstance(integration, type) else type(integration) for integration in disabled_integrations or [] ] # Integrations that are not explicitly set up by the user. used_as_default_integration = set() if with_defaults: for integration_cls in iter_default_integrations( with_auto_enabling_integrations ): if integration_cls.identifier not in integrations: instance = integration_cls() integrations[instance.identifier] = instance used_as_default_integration.add(instance.identifier) for identifier, integration in integrations.items(): with _installer_lock: if identifier not in _processed_integrations: if type(integration) in disabled_integrations: logger.debug("Ignoring integration %s", identifier) else: logger.debug( "Setting up previously not enabled integration %s", identifier ) try: type(integration).setup_once() except DidNotEnable as e: if identifier not in used_as_default_integration: raise logger.debug( "Did not enable default integration %s: %s", identifier, e ) else: _installed_integrations.add(identifier) _processed_integrations.add(identifier) integrations = { identifier: integration for identifier, integration in integrations.items() if identifier in _installed_integrations } for identifier in integrations: logger.debug("Enabling integration %s", identifier) return integrations class DidNotEnable(Exception): # noqa: N818 """ The integration could not be enabled due to a trivial user error like `flask` not being installed for the `FlaskIntegration`. This exception is silently swallowed for default integrations, but reraised for explicitly enabled integrations. """ class Integration(ABC): """Baseclass for all integrations. To accept options for an integration, implement your own constructor that saves those options on `self`. """ install = None """Legacy method, do not implement.""" identifier = None # type: str """String unique ID of integration type""" @staticmethod @abstractmethod def setup_once(): # type: () -> None """ Initialize the integration. This function is only called once, ever. Configuration is not available at this point, so the only thing to do here is to hook into exception handlers, and perhaps do monkeypatches. Inside those hooks `Integration.current` can be used to access the instance again. 
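A minimal sketch of an integration implementing this contract (`MyLibraryIntegration`, `my_library` and `do_work` are hypothetical names used purely for illustration)::

    import sentry_sdk
    from sentry_sdk.integrations import Integration

    class MyLibraryIntegration(Integration):
        identifier = "my_library"

        @staticmethod
        def setup_once():
            # type: () -> None
            import my_library  # hypothetical third-party module

            original = my_library.do_work

            def patched(*args, **kwargs):
                # Look the integration up lazily, per client, at call time.
                if sentry_sdk.get_client().get_integration(MyLibraryIntegration) is None:
                    return original(*args, **kwargs)
                with sentry_sdk.start_span(op="function", name="my_library.do_work"):
                    return original(*args, **kwargs)

            my_library.do_work = patched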
""" pass sentry-python-2.18.0/sentry_sdk/integrations/_asgi_common.py000066400000000000000000000061631471214654000243560ustar00rootroot00000000000000import urllib from sentry_sdk.scope import should_send_default_pii from sentry_sdk.integrations._wsgi_common import _filter_headers from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Dict from typing import Optional from typing import Union from typing_extensions import Literal from sentry_sdk.utils import AnnotatedValue def _get_headers(asgi_scope): # type: (Any) -> Dict[str, str] """ Extract headers from the ASGI scope, in the format that the Sentry protocol expects. """ headers = {} # type: Dict[str, str] for raw_key, raw_value in asgi_scope["headers"]: key = raw_key.decode("latin-1") value = raw_value.decode("latin-1") if key in headers: headers[key] = headers[key] + ", " + value else: headers[key] = value return headers def _get_url(asgi_scope, default_scheme, host): # type: (Dict[str, Any], Literal["ws", "http"], Optional[Union[AnnotatedValue, str]]) -> str """ Extract URL from the ASGI scope, without also including the querystring. """ scheme = asgi_scope.get("scheme", default_scheme) server = asgi_scope.get("server", None) path = asgi_scope.get("root_path", "") + asgi_scope.get("path", "") if host: return "%s://%s%s" % (scheme, host, path) if server is not None: host, port = server default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}.get(scheme) if port != default_port: return "%s://%s:%s%s" % (scheme, host, port, path) return "%s://%s%s" % (scheme, host, path) return path def _get_query(asgi_scope): # type: (Any) -> Any """ Extract querystring from the ASGI scope, in the format that the Sentry protocol expects. """ qs = asgi_scope.get("query_string") if not qs: return None return urllib.parse.unquote(qs.decode("latin-1")) def _get_ip(asgi_scope): # type: (Any) -> str """ Extract IP Address from the ASGI scope based on request headers with fallback to scope client. """ headers = _get_headers(asgi_scope) try: return headers["x-forwarded-for"].split(",")[0].strip() except (KeyError, IndexError): pass try: return headers["x-real-ip"] except KeyError: pass return asgi_scope.get("client")[0] def _get_request_data(asgi_scope): # type: (Any) -> Dict[str, Any] """ Returns data related to the HTTP request from the ASGI scope. 
""" request_data = {} # type: Dict[str, Any] ty = asgi_scope["type"] if ty in ("http", "websocket"): request_data["method"] = asgi_scope.get("method") request_data["headers"] = headers = _filter_headers(_get_headers(asgi_scope)) request_data["query_string"] = _get_query(asgi_scope) request_data["url"] = _get_url( asgi_scope, "http" if ty == "http" else "ws", headers.get("host") ) client = asgi_scope.get("client") if client and should_send_default_pii(): request_data["env"] = {"REMOTE_ADDR": _get_ip(asgi_scope)} return request_data sentry-python-2.18.0/sentry_sdk/integrations/_wsgi_common.py000066400000000000000000000163761471214654000244130ustar00rootroot00000000000000from contextlib import contextmanager import json from copy import deepcopy import sentry_sdk from sentry_sdk.scope import should_send_default_pii from sentry_sdk.utils import AnnotatedValue, logger try: from django.http.request import RawPostDataException except ImportError: RawPostDataException = None from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Dict from typing import Iterator from typing import Mapping from typing import MutableMapping from typing import Optional from typing import Union from sentry_sdk._types import Event, HttpStatusCodeRange SENSITIVE_ENV_KEYS = ( "REMOTE_ADDR", "HTTP_X_FORWARDED_FOR", "HTTP_SET_COOKIE", "HTTP_COOKIE", "HTTP_AUTHORIZATION", "HTTP_X_API_KEY", "HTTP_X_FORWARDED_FOR", "HTTP_X_REAL_IP", ) SENSITIVE_HEADERS = tuple( x[len("HTTP_") :] for x in SENSITIVE_ENV_KEYS if x.startswith("HTTP_") ) DEFAULT_HTTP_METHODS_TO_CAPTURE = ( "CONNECT", "DELETE", "GET", # "HEAD", # do not capture HEAD requests by default # "OPTIONS", # do not capture OPTIONS requests by default "PATCH", "POST", "PUT", "TRACE", ) # This noop context manager can be replaced with "from contextlib import nullcontext" when we drop Python 3.6 support @contextmanager def nullcontext(): # type: () -> Iterator[None] yield def request_body_within_bounds(client, content_length): # type: (Optional[sentry_sdk.client.BaseClient], int) -> bool if client is None: return False bodies = client.options["max_request_body_size"] return not ( bodies == "never" or (bodies == "small" and content_length > 10**3) or (bodies == "medium" and content_length > 10**4) ) class RequestExtractor: """ Base class for request extraction. """ # It does not make sense to make this class an ABC because it is not used # for typing, only so that child classes can inherit common methods from # it. Only some child classes implement all methods that raise # NotImplementedError in this class. def __init__(self, request): # type: (Any) -> None self.request = request def extract_into_event(self, event): # type: (Event) -> None client = sentry_sdk.get_client() if not client.is_active(): return data = None # type: Optional[Union[AnnotatedValue, Dict[str, Any]]] content_length = self.content_length() request_info = event.get("request", {}) if should_send_default_pii(): request_info["cookies"] = dict(self.cookies()) if not request_body_within_bounds(client, content_length): data = AnnotatedValue.removed_because_over_size_limit() else: # First read the raw body data # It is important to read this first because if it is Django # it will cache the body and then we can read the cached version # again in parsed_body() (or json() or wherever). raw_data = None try: raw_data = self.raw_data() except (RawPostDataException, ValueError): # If DjangoRestFramework is used it already read the body for us # so reading it here will fail. 
We can ignore this. pass parsed_body = self.parsed_body() if parsed_body is not None: data = parsed_body elif raw_data: data = AnnotatedValue.removed_because_raw_data() else: data = None if data is not None: request_info["data"] = data event["request"] = deepcopy(request_info) def content_length(self): # type: () -> int try: return int(self.env().get("CONTENT_LENGTH", 0)) except ValueError: return 0 def cookies(self): # type: () -> MutableMapping[str, Any] raise NotImplementedError() def raw_data(self): # type: () -> Optional[Union[str, bytes]] raise NotImplementedError() def form(self): # type: () -> Optional[Dict[str, Any]] raise NotImplementedError() def parsed_body(self): # type: () -> Optional[Dict[str, Any]] form = self.form() files = self.files() if form or files: data = {} if form: data = dict(form.items()) if files: for key in files.keys(): data[key] = AnnotatedValue.removed_because_raw_data() return data return self.json() def is_json(self): # type: () -> bool return _is_json_content_type(self.env().get("CONTENT_TYPE")) def json(self): # type: () -> Optional[Any] try: if not self.is_json(): return None try: raw_data = self.raw_data() except (RawPostDataException, ValueError): # The body might have already been read, in which case this will # fail raw_data = None if raw_data is None: return None if isinstance(raw_data, str): return json.loads(raw_data) else: return json.loads(raw_data.decode("utf-8")) except ValueError: pass return None def files(self): # type: () -> Optional[Dict[str, Any]] raise NotImplementedError() def size_of_file(self, file): # type: (Any) -> int raise NotImplementedError() def env(self): # type: () -> Dict[str, Any] raise NotImplementedError() def _is_json_content_type(ct): # type: (Optional[str]) -> bool mt = (ct or "").split(";", 1)[0] return ( mt == "application/json" or (mt.startswith("application/")) and mt.endswith("+json") ) def _filter_headers(headers): # type: (Mapping[str, str]) -> Mapping[str, Union[AnnotatedValue, str]] if should_send_default_pii(): return headers return { k: ( v if k.upper().replace("-", "_") not in SENSITIVE_HEADERS else AnnotatedValue.removed_because_over_size_limit() ) for k, v in headers.items() } def _in_http_status_code_range(code, code_ranges): # type: (object, list[HttpStatusCodeRange]) -> bool for target in code_ranges: if isinstance(target, int): if code == target: return True continue try: if code in target: return True except TypeError: logger.warning( "failed_request_status_codes has to be a list of integers or containers" ) return False class HttpCodeRangeContainer: """ Wrapper to make it possible to use list[HttpStatusCodeRange] as a Container[int]. Used for backwards compatibility with the old `failed_request_status_codes` option. 
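Illustrative behavior, given that `HttpStatusCodeRange` entries may be plain integers or containers such as `range`::

    container = HttpCodeRangeContainer([403, range(500, 600)])
    assert 403 in container        # matches the bare integer
    assert 503 in container        # falls inside range(500, 600)
    assert 200 not in container    # matches nothing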
""" def __init__(self, code_ranges): # type: (list[HttpStatusCodeRange]) -> None self._code_ranges = code_ranges def __contains__(self, item): # type: (object) -> bool return _in_http_status_code_range(item, self._code_ranges) sentry-python-2.18.0/sentry_sdk/integrations/aiohttp.py000066400000000000000000000313441471214654000233730ustar00rootroot00000000000000import sys import weakref from functools import wraps import sentry_sdk from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP, SPANSTATUS, SPANDATA from sentry_sdk.integrations import ( _DEFAULT_FAILED_REQUEST_STATUS_CODES, Integration, DidNotEnable, ) from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.sessions import track_session from sentry_sdk.integrations._wsgi_common import ( _filter_headers, request_body_within_bounds, ) from sentry_sdk.tracing import ( BAGGAGE_HEADER_NAME, SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE, ) from sentry_sdk.tracing_utils import should_propagate_trace from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, event_from_exception, logger, parse_url, parse_version, reraise, transaction_from_function, HAS_REAL_CONTEXTVARS, CONTEXTVARS_ERROR_MESSAGE, SENSITIVE_DATA_SUBSTITUTE, AnnotatedValue, ) try: import asyncio from aiohttp import __version__ as AIOHTTP_VERSION from aiohttp import ClientSession, TraceConfig from aiohttp.web import Application, HTTPException, UrlDispatcher except ImportError: raise DidNotEnable("AIOHTTP not installed") from typing import TYPE_CHECKING if TYPE_CHECKING: from aiohttp.web_request import Request from aiohttp.web_urldispatcher import UrlMappingMatchInfo from aiohttp import TraceRequestStartParams, TraceRequestEndParams from collections.abc import Set from types import SimpleNamespace from typing import Any from typing import Optional from typing import Tuple from typing import Union from sentry_sdk.utils import ExcInfo from sentry_sdk._types import Event, EventProcessor TRANSACTION_STYLE_VALUES = ("handler_name", "method_and_path_pattern") class AioHttpIntegration(Integration): identifier = "aiohttp" origin = f"auto.http.{identifier}" def __init__( self, transaction_style="handler_name", # type: str *, failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES, # type: Set[int] ): # type: (...) -> None if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" % (transaction_style, TRANSACTION_STYLE_VALUES) ) self.transaction_style = transaction_style self._failed_request_status_codes = failed_request_status_codes @staticmethod def setup_once(): # type: () -> None version = parse_version(AIOHTTP_VERSION) if version is None: raise DidNotEnable("Unparsable AIOHTTP version: {}".format(AIOHTTP_VERSION)) if version < (3, 4): raise DidNotEnable("AIOHTTP 3.4 or newer required.") if not HAS_REAL_CONTEXTVARS: # We better have contextvars or we're going to leak state between # requests. raise DidNotEnable( "The aiohttp integration for Sentry requires Python 3.7+ " " or aiocontextvars package." 
+ CONTEXTVARS_ERROR_MESSAGE ) ignore_logger("aiohttp.server") old_handle = Application._handle async def sentry_app_handle(self, request, *args, **kwargs): # type: (Any, Request, *Any, **Any) -> Any integration = sentry_sdk.get_client().get_integration(AioHttpIntegration) if integration is None: return await old_handle(self, request, *args, **kwargs) weak_request = weakref.ref(request) with sentry_sdk.isolation_scope() as scope: with track_session(scope, session_mode="request"): # Scope data will not leak between requests because aiohttp # create a task to wrap each request. scope.generate_propagation_context() scope.clear_breadcrumbs() scope.add_event_processor(_make_request_processor(weak_request)) headers = dict(request.headers) transaction = continue_trace( headers, op=OP.HTTP_SERVER, # If this transaction name makes it to the UI, AIOHTTP's # URL resolver did not find a route or died trying. name="generic AIOHTTP request", source=TRANSACTION_SOURCE_ROUTE, origin=AioHttpIntegration.origin, ) with sentry_sdk.start_transaction( transaction, custom_sampling_context={"aiohttp_request": request}, ): try: response = await old_handle(self, request) except HTTPException as e: transaction.set_http_status(e.status_code) if ( e.status_code in integration._failed_request_status_codes ): _capture_exception() raise except (asyncio.CancelledError, ConnectionResetError): transaction.set_status(SPANSTATUS.CANCELLED) raise except Exception: # This will probably map to a 500 but seems like we # have no way to tell. Do not set span status. reraise(*_capture_exception()) try: # A valid response handler will return a valid response with a status. But, if the handler # returns an invalid response (e.g. None), the line below will raise an AttributeError. # Even though this is likely invalid, we need to handle this case to ensure we don't break # the application. 
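# The ``integration._failed_request_status_codes`` check above is driven by
# the ``failed_request_status_codes`` constructor argument. A sketch of how
# an application would customize it at init time (placeholder DSN; the
# status-code set is just an example, not a recommendation):
#
#     import sentry_sdk
#     from sentry_sdk.integrations.aiohttp import AioHttpIntegration
#
#     sentry_sdk.init(
#         dsn="https://public@example.ingest.sentry.io/1",
#         integrations=[
#             AioHttpIntegration(
#                 transaction_style="method_and_path_pattern",
#                 failed_request_status_codes={403, *range(500, 600)},
#             )
#         ],
#     )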
response_status = response.status except AttributeError: pass else: transaction.set_http_status(response_status) return response Application._handle = sentry_app_handle old_urldispatcher_resolve = UrlDispatcher.resolve @wraps(old_urldispatcher_resolve) async def sentry_urldispatcher_resolve(self, request): # type: (UrlDispatcher, Request) -> UrlMappingMatchInfo rv = await old_urldispatcher_resolve(self, request) integration = sentry_sdk.get_client().get_integration(AioHttpIntegration) if integration is None: return rv name = None try: if integration.transaction_style == "handler_name": name = transaction_from_function(rv.handler) elif integration.transaction_style == "method_and_path_pattern": route_info = rv.get_info() pattern = route_info.get("path") or route_info.get("formatter") name = "{} {}".format(request.method, pattern) except Exception: pass if name is not None: sentry_sdk.get_current_scope().set_transaction_name( name, source=SOURCE_FOR_STYLE[integration.transaction_style], ) return rv UrlDispatcher.resolve = sentry_urldispatcher_resolve old_client_session_init = ClientSession.__init__ @ensure_integration_enabled(AioHttpIntegration, old_client_session_init) def init(*args, **kwargs): # type: (Any, Any) -> None client_trace_configs = list(kwargs.get("trace_configs") or ()) trace_config = create_trace_config() client_trace_configs.append(trace_config) kwargs["trace_configs"] = client_trace_configs return old_client_session_init(*args, **kwargs) ClientSession.__init__ = init def create_trace_config(): # type: () -> TraceConfig async def on_request_start(session, trace_config_ctx, params): # type: (ClientSession, SimpleNamespace, TraceRequestStartParams) -> None if sentry_sdk.get_client().get_integration(AioHttpIntegration) is None: return method = params.method.upper() parsed_url = None with capture_internal_exceptions(): parsed_url = parse_url(str(params.url), sanitize=False) span = sentry_sdk.start_span( op=OP.HTTP_CLIENT, name="%s %s" % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE), origin=AioHttpIntegration.origin, ) span.set_data(SPANDATA.HTTP_METHOD, method) if parsed_url is not None: span.set_data("url", parsed_url.url) span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) client = sentry_sdk.get_client() if should_propagate_trace(client, str(params.url)): for ( key, value, ) in sentry_sdk.get_current_scope().iter_trace_propagation_headers( span=span ): logger.debug( "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format( key=key, value=value, url=params.url ) ) if key == BAGGAGE_HEADER_NAME and params.headers.get( BAGGAGE_HEADER_NAME ): # do not overwrite any existing baggage, just append to it params.headers[key] += "," + value else: params.headers[key] = value trace_config_ctx.span = span async def on_request_end(session, trace_config_ctx, params): # type: (ClientSession, SimpleNamespace, TraceRequestEndParams) -> None if trace_config_ctx.span is None: return span = trace_config_ctx.span span.set_http_status(int(params.response.status)) span.set_data("reason", params.response.reason) span.finish() trace_config = TraceConfig() trace_config.on_request_start.append(on_request_start) trace_config.on_request_end.append(on_request_end) return trace_config def _make_request_processor(weak_request): # type: (weakref.ReferenceType[Request]) -> EventProcessor def aiohttp_processor( event, # type: Event hint, # type: dict[str, Tuple[type, BaseException, Any]] ): # type: (...) 
-> Event request = weak_request() if request is None: return event with capture_internal_exceptions(): request_info = event.setdefault("request", {}) request_info["url"] = "%s://%s%s" % ( request.scheme, request.host, request.path, ) request_info["query_string"] = request.query_string request_info["method"] = request.method request_info["env"] = {"REMOTE_ADDR": request.remote} request_info["headers"] = _filter_headers(dict(request.headers)) # Just attach raw data here if it is within bounds, if available. # Unfortunately there's no way to get structured data from aiohttp # without awaiting on some coroutine. request_info["data"] = get_aiohttp_request_data(request) return event return aiohttp_processor def _capture_exception(): # type: () -> ExcInfo exc_info = sys.exc_info() event, hint = event_from_exception( exc_info, client_options=sentry_sdk.get_client().options, mechanism={"type": "aiohttp", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) return exc_info BODY_NOT_READ_MESSAGE = "[Can't show request body due to implementation details.]" def get_aiohttp_request_data(request): # type: (Request) -> Union[Optional[str], AnnotatedValue] bytes_body = request._read_bytes if bytes_body is not None: # we have body to show if not request_body_within_bounds(sentry_sdk.get_client(), len(bytes_body)): return AnnotatedValue.removed_because_over_size_limit() encoding = request.charset or "utf-8" return bytes_body.decode(encoding, "replace") if request.can_read_body: # body exists but we can't show it return BODY_NOT_READ_MESSAGE # request has no body return None sentry-python-2.18.0/sentry_sdk/integrations/anthropic.py000066400000000000000000000224571471214654000237170ustar00rootroot00000000000000from functools import wraps from typing import TYPE_CHECKING import sentry_sdk from sentry_sdk.ai.monitoring import record_token_usage from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.scope import should_send_default_pii from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, package_version, ) try: from anthropic.resources import AsyncMessages, Messages if TYPE_CHECKING: from anthropic.types import MessageStreamEvent except ImportError: raise DidNotEnable("Anthropic not installed") if TYPE_CHECKING: from typing import Any, AsyncIterator, Iterator from sentry_sdk.tracing import Span class AnthropicIntegration(Integration): identifier = "anthropic" origin = f"auto.ai.{identifier}" def __init__(self, include_prompts=True): # type: (AnthropicIntegration, bool) -> None self.include_prompts = include_prompts @staticmethod def setup_once(): # type: () -> None version = package_version("anthropic") if version is None: raise DidNotEnable("Unparsable anthropic version.") if version < (0, 16): raise DidNotEnable("anthropic 0.16 or newer required.") Messages.create = _wrap_message_create(Messages.create) AsyncMessages.create = _wrap_message_create_async(AsyncMessages.create) def _capture_exception(exc): # type: (Any) -> None event, hint = event_from_exception( exc, client_options=sentry_sdk.get_client().options, mechanism={"type": "anthropic", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) def _calculate_token_usage(result, span): # type: (Messages, Span) -> None input_tokens = 0 output_tokens = 0 if hasattr(result, "usage"): usage = result.usage if hasattr(usage, "input_tokens") and isinstance(usage.input_tokens, int): input_tokens = usage.input_tokens if hasattr(usage, "output_tokens") and 
isinstance(usage.output_tokens, int): output_tokens = usage.output_tokens total_tokens = input_tokens + output_tokens record_token_usage(span, input_tokens, output_tokens, total_tokens) def _get_responses(content): # type: (list[Any]) -> list[dict[str, Any]] """ Get JSON of a Anthropic responses. """ responses = [] for item in content: if hasattr(item, "text"): responses.append( { "type": item.type, "text": item.text, } ) return responses def _collect_ai_data(event, input_tokens, output_tokens, content_blocks): # type: (MessageStreamEvent, int, int, list[str]) -> tuple[int, int, list[str]] """ Count token usage and collect content blocks from the AI streaming response. """ with capture_internal_exceptions(): if hasattr(event, "type"): if event.type == "message_start": usage = event.message.usage input_tokens += usage.input_tokens output_tokens += usage.output_tokens elif event.type == "content_block_start": pass elif event.type == "content_block_delta": if hasattr(event.delta, "text"): content_blocks.append(event.delta.text) elif event.type == "content_block_stop": pass elif event.type == "message_delta": output_tokens += event.usage.output_tokens return input_tokens, output_tokens, content_blocks def _add_ai_data_to_span( span, integration, input_tokens, output_tokens, content_blocks ): # type: (Span, AnthropicIntegration, int, int, list[str]) -> None """ Add token usage and content blocks from the AI streaming response to the span. """ with capture_internal_exceptions(): if should_send_default_pii() and integration.include_prompts: complete_message = "".join(content_blocks) span.set_data( SPANDATA.AI_RESPONSES, [{"type": "text", "text": complete_message}], ) total_tokens = input_tokens + output_tokens record_token_usage(span, input_tokens, output_tokens, total_tokens) span.set_data(SPANDATA.AI_STREAMING, True) def _sentry_patched_create_common(f, *args, **kwargs): # type: (Any, *Any, **Any) -> Any integration = kwargs.pop("integration") if integration is None: return f(*args, **kwargs) if "messages" not in kwargs: return f(*args, **kwargs) try: iter(kwargs["messages"]) except TypeError: return f(*args, **kwargs) span = sentry_sdk.start_span( op=OP.ANTHROPIC_MESSAGES_CREATE, description="Anthropic messages create", origin=AnthropicIntegration.origin, ) span.__enter__() result = yield f, args, kwargs # add data to span and finish it messages = list(kwargs["messages"]) model = kwargs.get("model") with capture_internal_exceptions(): span.set_data(SPANDATA.AI_MODEL_ID, model) span.set_data(SPANDATA.AI_STREAMING, False) if should_send_default_pii() and integration.include_prompts: span.set_data(SPANDATA.AI_INPUT_MESSAGES, messages) if hasattr(result, "content"): if should_send_default_pii() and integration.include_prompts: span.set_data(SPANDATA.AI_RESPONSES, _get_responses(result.content)) _calculate_token_usage(result, span) span.__exit__(None, None, None) # Streaming response elif hasattr(result, "_iterator"): old_iterator = result._iterator def new_iterator(): # type: () -> Iterator[MessageStreamEvent] input_tokens = 0 output_tokens = 0 content_blocks = [] # type: list[str] for event in old_iterator: input_tokens, output_tokens, content_blocks = _collect_ai_data( event, input_tokens, output_tokens, content_blocks ) if event.type != "message_stop": yield event _add_ai_data_to_span( span, integration, input_tokens, output_tokens, content_blocks ) span.__exit__(None, None, None) async def new_iterator_async(): # type: () -> AsyncIterator[MessageStreamEvent] input_tokens = 0 output_tokens = 0 
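# Sketch of enabling this integration from application code (placeholder
# DSN; ``include_prompts=False`` keeps prompts and responses out of the
# AI spans even when ``send_default_pii`` is enabled):
#
#     import sentry_sdk
#     from sentry_sdk.integrations.anthropic import AnthropicIntegration
#
#     sentry_sdk.init(
#         dsn="https://public@example.ingest.sentry.io/1",
#         traces_sample_rate=1.0,
#         integrations=[AnthropicIntegration(include_prompts=False)],
#     )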
content_blocks = [] # type: list[str] async for event in old_iterator: input_tokens, output_tokens, content_blocks = _collect_ai_data( event, input_tokens, output_tokens, content_blocks ) if event.type != "message_stop": yield event _add_ai_data_to_span( span, integration, input_tokens, output_tokens, content_blocks ) span.__exit__(None, None, None) if str(type(result._iterator)) == "<class 'async_generator'>": result._iterator = new_iterator_async() else: result._iterator = new_iterator() else: span.set_data("unknown_response", True) span.__exit__(None, None, None) return result def _wrap_message_create(f): # type: (Any) -> Any def _execute_sync(f, *args, **kwargs): # type: (Any, *Any, **Any) -> Any gen = _sentry_patched_create_common(f, *args, **kwargs) try: f, args, kwargs = next(gen) except StopIteration as e: return e.value try: try: result = f(*args, **kwargs) except Exception as exc: _capture_exception(exc) raise exc from None return gen.send(result) except StopIteration as e: return e.value @wraps(f) def _sentry_patched_create_sync(*args, **kwargs): # type: (*Any, **Any) -> Any integration = sentry_sdk.get_client().get_integration(AnthropicIntegration) kwargs["integration"] = integration return _execute_sync(f, *args, **kwargs) return _sentry_patched_create_sync def _wrap_message_create_async(f): # type: (Any) -> Any async def _execute_async(f, *args, **kwargs): # type: (Any, *Any, **Any) -> Any gen = _sentry_patched_create_common(f, *args, **kwargs) try: f, args, kwargs = next(gen) except StopIteration as e: return await e.value try: try: result = await f(*args, **kwargs) except Exception as exc: _capture_exception(exc) raise exc from None return gen.send(result) except StopIteration as e: return e.value @wraps(f) async def _sentry_patched_create_async(*args, **kwargs): # type: (*Any, **Any) -> Any integration = sentry_sdk.get_client().get_integration(AnthropicIntegration) kwargs["integration"] = integration return await _execute_async(f, *args, **kwargs) return _sentry_patched_create_async sentry-python-2.18.0/sentry_sdk/integrations/argv.py000066400000000000000000000016171471214654000226620ustar00rootroot00000000000000import sys import sentry_sdk from sentry_sdk.integrations import Integration from sentry_sdk.scope import add_global_event_processor from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Optional from sentry_sdk._types import Event, Hint class ArgvIntegration(Integration): identifier = "argv" @staticmethod def setup_once(): # type: () -> None @add_global_event_processor def processor(event, hint): # type: (Event, Optional[Hint]) -> Optional[Event] if sentry_sdk.get_client().get_integration(ArgvIntegration) is not None: extra = event.setdefault("extra", {}) # If some event processor decided to set extra to e.g. an # `int`, don't crash. Not here.
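# ArgvIntegration is part of _DEFAULT_INTEGRATIONS, so it is active out of
# the box. A sketch of opting out, assuming the ``disabled_integrations``
# option of ``sentry_sdk.init`` available in this SDK generation
# (placeholder DSN):
#
#     import sentry_sdk
#     from sentry_sdk.integrations.argv import ArgvIntegration
#
#     sentry_sdk.init(
#         dsn="https://public@example.ingest.sentry.io/1",
#         disabled_integrations=[ArgvIntegration()],
#     )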
if isinstance(extra, dict): extra["sys.argv"] = sys.argv return event sentry-python-2.18.0/sentry_sdk/integrations/ariadne.py000066400000000000000000000135021471214654000233220ustar00rootroot00000000000000from importlib import import_module import sentry_sdk from sentry_sdk import get_client, capture_event from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.integrations._wsgi_common import request_body_within_bounds from sentry_sdk.scope import should_send_default_pii from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, event_from_exception, package_version, ) try: # importing like this is necessary due to name shadowing in ariadne # (ariadne.graphql is also a function) ariadne_graphql = import_module("ariadne.graphql") except ImportError: raise DidNotEnable("ariadne is not installed") from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any, Dict, List, Optional from ariadne.types import GraphQLError, GraphQLResult, GraphQLSchema, QueryParser # type: ignore from graphql.language.ast import DocumentNode # type: ignore from sentry_sdk._types import Event, EventProcessor class AriadneIntegration(Integration): identifier = "ariadne" @staticmethod def setup_once(): # type: () -> None version = package_version("ariadne") if version is None: raise DidNotEnable("Unparsable ariadne version.") if version < (0, 20): raise DidNotEnable("ariadne 0.20 or newer required.") ignore_logger("ariadne") _patch_graphql() def _patch_graphql(): # type: () -> None old_parse_query = ariadne_graphql.parse_query old_handle_errors = ariadne_graphql.handle_graphql_errors old_handle_query_result = ariadne_graphql.handle_query_result @ensure_integration_enabled(AriadneIntegration, old_parse_query) def _sentry_patched_parse_query(context_value, query_parser, data): # type: (Optional[Any], Optional[QueryParser], Any) -> DocumentNode event_processor = _make_request_event_processor(data) sentry_sdk.get_isolation_scope().add_event_processor(event_processor) result = old_parse_query(context_value, query_parser, data) return result @ensure_integration_enabled(AriadneIntegration, old_handle_errors) def _sentry_patched_handle_graphql_errors(errors, *args, **kwargs): # type: (List[GraphQLError], Any, Any) -> GraphQLResult result = old_handle_errors(errors, *args, **kwargs) event_processor = _make_response_event_processor(result[1]) sentry_sdk.get_isolation_scope().add_event_processor(event_processor) client = get_client() if client.is_active(): with capture_internal_exceptions(): for error in errors: event, hint = event_from_exception( error, client_options=client.options, mechanism={ "type": AriadneIntegration.identifier, "handled": False, }, ) capture_event(event, hint=hint) return result @ensure_integration_enabled(AriadneIntegration, old_handle_query_result) def _sentry_patched_handle_query_result(result, *args, **kwargs): # type: (Any, Any, Any) -> GraphQLResult query_result = old_handle_query_result(result, *args, **kwargs) event_processor = _make_response_event_processor(query_result[1]) sentry_sdk.get_isolation_scope().add_event_processor(event_processor) client = get_client() if client.is_active(): with capture_internal_exceptions(): for error in result.errors or []: event, hint = event_from_exception( error, client_options=client.options, mechanism={ "type": AriadneIntegration.identifier, "handled": False, }, ) capture_event(event, hint=hint) return query_result 
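# AriadneIntegration is listed in _AUTO_ENABLING_INTEGRATIONS, so it turns
# itself on when ariadne is importable. Passing it explicitly, as in this
# sketch, merely pins that behavior (placeholder DSN; ``send_default_pii``
# gates the GraphQL request/response payloads attached by the processors
# below):
#
#     import sentry_sdk
#     from sentry_sdk.integrations.ariadne import AriadneIntegration
#
#     sentry_sdk.init(
#         dsn="https://public@example.ingest.sentry.io/1",
#         send_default_pii=True,
#         integrations=[AriadneIntegration()],
#     )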
ariadne_graphql.parse_query = _sentry_patched_parse_query # type: ignore ariadne_graphql.handle_graphql_errors = _sentry_patched_handle_graphql_errors # type: ignore ariadne_graphql.handle_query_result = _sentry_patched_handle_query_result # type: ignore def _make_request_event_processor(data): # type: (GraphQLSchema) -> EventProcessor """Add request data and api_target to events.""" def inner(event, hint): # type: (Event, dict[str, Any]) -> Event if not isinstance(data, dict): return event with capture_internal_exceptions(): try: content_length = int( (data.get("headers") or {}).get("Content-Length", 0) ) except (TypeError, ValueError): return event if should_send_default_pii() and request_body_within_bounds( get_client(), content_length ): request_info = event.setdefault("request", {}) request_info["api_target"] = "graphql" request_info["data"] = data elif event.get("request", {}).get("data"): del event["request"]["data"] return event return inner def _make_response_event_processor(response): # type: (Dict[str, Any]) -> EventProcessor """Add response data to the event's response context.""" def inner(event, hint): # type: (Event, dict[str, Any]) -> Event with capture_internal_exceptions(): if should_send_default_pii() and response.get("errors"): contexts = event.setdefault("contexts", {}) contexts["response"] = { "data": response, } return event return inner sentry-python-2.18.0/sentry_sdk/integrations/arq.py000066400000000000000000000163041471214654000225050ustar00rootroot00000000000000import sys import sentry_sdk from sentry_sdk.consts import OP, SPANSTATUS from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_TASK from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, event_from_exception, SENSITIVE_DATA_SUBSTITUTE, parse_version, reraise, ) try: import arq.worker from arq.version import VERSION as ARQ_VERSION from arq.connections import ArqRedis from arq.worker import JobExecutionFailed, Retry, RetryJob, Worker except ImportError: raise DidNotEnable("Arq is not installed") from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any, Dict, Optional, Union from sentry_sdk._types import EventProcessor, Event, ExcInfo, Hint from arq.cron import CronJob from arq.jobs import Job from arq.typing import WorkerCoroutine from arq.worker import Function ARQ_CONTROL_FLOW_EXCEPTIONS = (JobExecutionFailed, Retry, RetryJob) class ArqIntegration(Integration): identifier = "arq" origin = f"auto.queue.{identifier}" @staticmethod def setup_once(): # type: () -> None try: if isinstance(ARQ_VERSION, str): version = parse_version(ARQ_VERSION) else: version = ARQ_VERSION.version[:2] except (TypeError, ValueError): version = None if version is None: raise DidNotEnable("Unparsable arq version: {}".format(ARQ_VERSION)) if version < (0, 23): raise DidNotEnable("arq 0.23 or newer required.") patch_enqueue_job() patch_run_job() patch_create_worker() ignore_logger("arq.worker") def patch_enqueue_job(): # type: () -> None old_enqueue_job = ArqRedis.enqueue_job async def _sentry_enqueue_job(self, function, *args, **kwargs): # type: (ArqRedis, str, *Any, **Any) -> Optional[Job] integration = sentry_sdk.get_client().get_integration(ArqIntegration) if integration is None: return await old_enqueue_job(self, function, *args, **kwargs) with sentry_sdk.start_span( op=OP.QUEUE_SUBMIT_ARQ, 
name=function, origin=ArqIntegration.origin ): return await old_enqueue_job(self, function, *args, **kwargs) ArqRedis.enqueue_job = _sentry_enqueue_job def patch_run_job(): # type: () -> None old_run_job = Worker.run_job async def _sentry_run_job(self, job_id, score): # type: (Worker, str, int) -> None integration = sentry_sdk.get_client().get_integration(ArqIntegration) if integration is None: return await old_run_job(self, job_id, score) with sentry_sdk.isolation_scope() as scope: scope._name = "arq" scope.clear_breadcrumbs() transaction = Transaction( name="unknown arq task", status="ok", op=OP.QUEUE_TASK_ARQ, source=TRANSACTION_SOURCE_TASK, origin=ArqIntegration.origin, ) with sentry_sdk.start_transaction(transaction): return await old_run_job(self, job_id, score) Worker.run_job = _sentry_run_job def _capture_exception(exc_info): # type: (ExcInfo) -> None scope = sentry_sdk.get_current_scope() if scope.transaction is not None: if exc_info[0] in ARQ_CONTROL_FLOW_EXCEPTIONS: scope.transaction.set_status(SPANSTATUS.ABORTED) return scope.transaction.set_status(SPANSTATUS.INTERNAL_ERROR) event, hint = event_from_exception( exc_info, client_options=sentry_sdk.get_client().options, mechanism={"type": ArqIntegration.identifier, "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) def _make_event_processor(ctx, *args, **kwargs): # type: (Dict[Any, Any], *Any, **Any) -> EventProcessor def event_processor(event, hint): # type: (Event, Hint) -> Optional[Event] with capture_internal_exceptions(): scope = sentry_sdk.get_current_scope() if scope.transaction is not None: scope.transaction.name = ctx["job_name"] event["transaction"] = ctx["job_name"] tags = event.setdefault("tags", {}) tags["arq_task_id"] = ctx["job_id"] tags["arq_task_retry"] = ctx["job_try"] > 1 extra = event.setdefault("extra", {}) extra["arq-job"] = { "task": ctx["job_name"], "args": ( args if should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE ), "kwargs": ( kwargs if should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE ), "retry": ctx["job_try"], } return event return event_processor def _wrap_coroutine(name, coroutine): # type: (str, WorkerCoroutine) -> WorkerCoroutine async def _sentry_coroutine(ctx, *args, **kwargs): # type: (Dict[Any, Any], *Any, **Any) -> Any integration = sentry_sdk.get_client().get_integration(ArqIntegration) if integration is None: return await coroutine(ctx, *args, **kwargs) sentry_sdk.get_isolation_scope().add_event_processor( _make_event_processor({**ctx, "job_name": name}, *args, **kwargs) ) try: result = await coroutine(ctx, *args, **kwargs) except Exception: exc_info = sys.exc_info() _capture_exception(exc_info) reraise(*exc_info) return result return _sentry_coroutine def patch_create_worker(): # type: () -> None old_create_worker = arq.worker.create_worker @ensure_integration_enabled(ArqIntegration, old_create_worker) def _sentry_create_worker(*args, **kwargs): # type: (*Any, **Any) -> Worker settings_cls = args[0] if hasattr(settings_cls, "functions"): settings_cls.functions = [ _get_arq_function(func) for func in settings_cls.functions ] if hasattr(settings_cls, "cron_jobs"): settings_cls.cron_jobs = [ _get_arq_cron_job(cron_job) for cron_job in settings_cls.cron_jobs ] if "functions" in kwargs: kwargs["functions"] = [ _get_arq_function(func) for func in kwargs["functions"] ] if "cron_jobs" in kwargs: kwargs["cron_jobs"] = [ _get_arq_cron_job(cron_job) for cron_job in kwargs["cron_jobs"] ] return old_create_worker(*args, **kwargs) arq.worker.create_worker = 
_sentry_create_worker def _get_arq_function(func): # type: (Union[str, Function, WorkerCoroutine]) -> Function arq_func = arq.worker.func(func) arq_func.coroutine = _wrap_coroutine(arq_func.name, arq_func.coroutine) return arq_func def _get_arq_cron_job(cron_job): # type: (CronJob) -> CronJob cron_job.coroutine = _wrap_coroutine(cron_job.name, cron_job.coroutine) return cron_job sentry-python-2.18.0/sentry_sdk/integrations/asgi.py000066400000000000000000000306201471214654000226420ustar00rootroot00000000000000""" An ASGI middleware. Based on Tom Christie's `sentry-asgi `. """ import asyncio import inspect from copy import deepcopy from functools import partial import sentry_sdk from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP from sentry_sdk.integrations._asgi_common import ( _get_headers, _get_request_data, _get_url, ) from sentry_sdk.integrations._wsgi_common import ( DEFAULT_HTTP_METHODS_TO_CAPTURE, nullcontext, ) from sentry_sdk.sessions import track_session from sentry_sdk.tracing import ( SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE, TRANSACTION_SOURCE_URL, TRANSACTION_SOURCE_COMPONENT, TRANSACTION_SOURCE_CUSTOM, ) from sentry_sdk.utils import ( ContextVar, event_from_exception, HAS_REAL_CONTEXTVARS, CONTEXTVARS_ERROR_MESSAGE, logger, transaction_from_function, _get_installed_modules, ) from sentry_sdk.tracing import Transaction from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Callable from typing import Dict from typing import Optional from typing import Tuple from sentry_sdk._types import Event, Hint _asgi_middleware_applied = ContextVar("sentry_asgi_middleware_applied") _DEFAULT_TRANSACTION_NAME = "generic ASGI request" TRANSACTION_STYLE_VALUES = ("endpoint", "url") def _capture_exception(exc, mechanism_type="asgi"): # type: (Any, str) -> None event, hint = event_from_exception( exc, client_options=sentry_sdk.get_client().options, mechanism={"type": mechanism_type, "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) def _looks_like_asgi3(app): # type: (Any) -> bool """ Try to figure out if an application object supports ASGI3. This is how uvicorn figures out the application version as well. """ if inspect.isclass(app): return hasattr(app, "__await__") elif inspect.isfunction(app): return asyncio.iscoroutinefunction(app) else: call = getattr(app, "__call__", None) # noqa return asyncio.iscoroutinefunction(call) class SentryAsgiMiddleware: __slots__ = ( "app", "__call__", "transaction_style", "mechanism_type", "span_origin", "http_methods_to_capture", ) def __init__( self, app, # type: Any unsafe_context_data=False, # type: bool transaction_style="endpoint", # type: str mechanism_type="asgi", # type: str span_origin="manual", # type: str http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE, # type: Tuple[str, ...] ): # type: (...) -> None """ Instrument an ASGI application with Sentry. Provides HTTP/websocket data to sent events and basic handling for exceptions bubbling up through the middleware. :param unsafe_context_data: Disable errors when a proper contextvars installation could not be found. We do not recommend changing this from the default. """ if not unsafe_context_data and not HAS_REAL_CONTEXTVARS: # We better have contextvars or we're going to leak state between # requests. raise RuntimeError( "The ASGI middleware for Sentry requires Python 3.7+ " "or the aiocontextvars package." 
+ CONTEXTVARS_ERROR_MESSAGE ) if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" % (transaction_style, TRANSACTION_STYLE_VALUES) ) asgi_middleware_while_using_starlette_or_fastapi = ( mechanism_type == "asgi" and "starlette" in _get_installed_modules() ) if asgi_middleware_while_using_starlette_or_fastapi: logger.warning( "The Sentry Python SDK can now automatically support ASGI frameworks like Starlette and FastAPI. " "Please remove 'SentryAsgiMiddleware' from your project. " "See https://docs.sentry.io/platforms/python/guides/asgi/ for more information." ) self.transaction_style = transaction_style self.mechanism_type = mechanism_type self.span_origin = span_origin self.app = app self.http_methods_to_capture = http_methods_to_capture if _looks_like_asgi3(app): self.__call__ = self._run_asgi3 # type: Callable[..., Any] else: self.__call__ = self._run_asgi2 def _run_asgi2(self, scope): # type: (Any) -> Any async def inner(receive, send): # type: (Any, Any) -> Any return await self._run_app(scope, receive, send, asgi_version=2) return inner async def _run_asgi3(self, scope, receive, send): # type: (Any, Any, Any) -> Any return await self._run_app(scope, receive, send, asgi_version=3) async def _run_app(self, scope, receive, send, asgi_version): # type: (Any, Any, Any, Any, int) -> Any is_recursive_asgi_middleware = _asgi_middleware_applied.get(False) is_lifespan = scope["type"] == "lifespan" if is_recursive_asgi_middleware or is_lifespan: try: if asgi_version == 2: return await self.app(scope)(receive, send) else: return await self.app(scope, receive, send) except Exception as exc: _capture_exception(exc, mechanism_type=self.mechanism_type) raise exc from None _asgi_middleware_applied.set(True) try: with sentry_sdk.isolation_scope() as sentry_scope: with track_session(sentry_scope, session_mode="request"): sentry_scope.clear_breadcrumbs() sentry_scope._name = "asgi" processor = partial(self.event_processor, asgi_scope=scope) sentry_scope.add_event_processor(processor) ty = scope["type"] ( transaction_name, transaction_source, ) = self._get_transaction_name_and_source( self.transaction_style, scope, ) method = scope.get("method", "").upper() transaction = None if method in self.http_methods_to_capture: if ty in ("http", "websocket"): transaction = continue_trace( _get_headers(scope), op="{}.server".format(ty), name=transaction_name, source=transaction_source, origin=self.span_origin, ) logger.debug( "[ASGI] Created transaction (continuing trace): %s", transaction, ) else: transaction = Transaction( op=OP.HTTP_SERVER, name=transaction_name, source=transaction_source, origin=self.span_origin, ) logger.debug( "[ASGI] Created transaction (new): %s", transaction ) transaction.set_tag("asgi.type", ty) logger.debug( "[ASGI] Set transaction name and source on transaction: '%s' / '%s'", transaction.name, transaction.source, ) with ( sentry_sdk.start_transaction( transaction, custom_sampling_context={"asgi_scope": scope}, ) if transaction is not None else nullcontext() ): logger.debug("[ASGI] Started transaction: %s", transaction) try: async def _sentry_wrapped_send(event): # type: (Dict[str, Any]) -> Any if transaction is not None: is_http_response = ( event.get("type") == "http.response.start" and "status" in event ) if is_http_response: transaction.set_http_status(event["status"]) return await send(event) if asgi_version == 2: return await self.app(scope)( receive, _sentry_wrapped_send ) else: return await self.app( 
scope, receive, _sentry_wrapped_send ) except Exception as exc: _capture_exception(exc, mechanism_type=self.mechanism_type) raise exc from None finally: _asgi_middleware_applied.set(False) def event_processor(self, event, hint, asgi_scope): # type: (Event, Hint, Any) -> Optional[Event] request_data = event.get("request", {}) request_data.update(_get_request_data(asgi_scope)) event["request"] = deepcopy(request_data) # Only set transaction name if not already set by Starlette or FastAPI (or other frameworks) already_set = event["transaction"] != _DEFAULT_TRANSACTION_NAME and event[ "transaction_info" ].get("source") in [ TRANSACTION_SOURCE_COMPONENT, TRANSACTION_SOURCE_ROUTE, TRANSACTION_SOURCE_CUSTOM, ] if not already_set: name, source = self._get_transaction_name_and_source( self.transaction_style, asgi_scope ) event["transaction"] = name event["transaction_info"] = {"source": source} logger.debug( "[ASGI] Set transaction name and source in event_processor: '%s' / '%s'", event["transaction"], event["transaction_info"]["source"], ) return event # Helper functions. # # Note: Those functions are not public API. If you want to mutate request # data to your liking it's recommended to use the `before_send` callback # for that. def _get_transaction_name_and_source(self, transaction_style, asgi_scope): # type: (SentryAsgiMiddleware, str, Any) -> Tuple[str, str] name = None source = SOURCE_FOR_STYLE[transaction_style] ty = asgi_scope.get("type") if transaction_style == "endpoint": endpoint = asgi_scope.get("endpoint") # Webframeworks like Starlette mutate the ASGI env once routing is # done, which is sometime after the request has started. If we have # an endpoint, overwrite our generic transaction name. if endpoint: name = transaction_from_function(endpoint) or "" else: name = _get_url(asgi_scope, "http" if ty == "http" else "ws", host=None) source = TRANSACTION_SOURCE_URL elif transaction_style == "url": # FastAPI includes the route object in the scope to let Sentry extract the # path from it for the transaction name route = asgi_scope.get("route") if route: path = getattr(route, "path", None) if path is not None: name = path else: name = _get_url(asgi_scope, "http" if ty == "http" else "ws", host=None) source = TRANSACTION_SOURCE_URL if name is None: name = _DEFAULT_TRANSACTION_NAME source = TRANSACTION_SOURCE_ROUTE return name, source return name, source sentry-python-2.18.0/sentry_sdk/integrations/asyncio.py000066400000000000000000000061771471214654000233760ustar00rootroot00000000000000import sys import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.utils import event_from_exception, reraise try: import asyncio from asyncio.tasks import Task except ImportError: raise DidNotEnable("asyncio not available") from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from collections.abc import Coroutine from sentry_sdk._types import ExcInfo def get_name(coro): # type: (Any) -> str return ( getattr(coro, "__qualname__", None) or getattr(coro, "__name__", None) or "coroutine without __name__" ) def patch_asyncio(): # type: () -> None orig_task_factory = None try: loop = asyncio.get_running_loop() orig_task_factory = loop.get_task_factory() def _sentry_task_factory(loop, coro, **kwargs): # type: (asyncio.AbstractEventLoop, Coroutine[Any, Any, Any], Any) -> asyncio.Future[Any] async def _coro_creating_hub_and_span(): # type: () -> Any result = None with sentry_sdk.isolation_scope(): with sentry_sdk.start_span( 
op=OP.FUNCTION, name=get_name(coro), origin=AsyncioIntegration.origin, ): try: result = await coro except Exception: reraise(*_capture_exception()) return result # Trying to use user set task factory (if there is one) if orig_task_factory: return orig_task_factory(loop, _coro_creating_hub_and_span(), **kwargs) # The default task factory in `asyncio` does not have its own function # but is just a couple of lines in `asyncio.base_events.create_task()` # Those lines are copied here. # WARNING: # If the default behavior of the task creation in asyncio changes, # this will break! task = Task(_coro_creating_hub_and_span(), loop=loop, **kwargs) if task._source_traceback: # type: ignore del task._source_traceback[-1] # type: ignore return task loop.set_task_factory(_sentry_task_factory) # type: ignore except RuntimeError: # When there is no running loop, we have nothing to patch. pass def _capture_exception(): # type: () -> ExcInfo exc_info = sys.exc_info() client = sentry_sdk.get_client() integration = client.get_integration(AsyncioIntegration) if integration is not None: event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "asyncio", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) return exc_info class AsyncioIntegration(Integration): identifier = "asyncio" origin = f"auto.function.{identifier}" @staticmethod def setup_once(): # type: () -> None patch_asyncio() sentry-python-2.18.0/sentry_sdk/integrations/asyncpg.py000066400000000000000000000146041471214654000233670ustar00rootroot00000000000000from __future__ import annotations import contextlib from typing import Any, TypeVar, Callable, Awaitable, Iterator import sentry_sdk from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.tracing import Span from sentry_sdk.tracing_utils import add_query_source, record_sql_queries from sentry_sdk.utils import ( ensure_integration_enabled, parse_version, capture_internal_exceptions, ) try: import asyncpg # type: ignore[import-not-found] from asyncpg.cursor import BaseCursor # type: ignore except ImportError: raise DidNotEnable("asyncpg not installed.") # asyncpg.__version__ is a string containing the semantic version in the form of ".." asyncpg_version = parse_version(asyncpg.__version__) if asyncpg_version is not None and asyncpg_version < (0, 23, 0): raise DidNotEnable("asyncpg >= 0.23.0 required") class AsyncPGIntegration(Integration): identifier = "asyncpg" origin = f"auto.db.{identifier}" _record_params = False def __init__(self, *, record_params: bool = False): AsyncPGIntegration._record_params = record_params @staticmethod def setup_once() -> None: asyncpg.Connection.execute = _wrap_execute( asyncpg.Connection.execute, ) asyncpg.Connection._execute = _wrap_connection_method( asyncpg.Connection._execute ) asyncpg.Connection._executemany = _wrap_connection_method( asyncpg.Connection._executemany, executemany=True ) asyncpg.Connection.cursor = _wrap_cursor_creation(asyncpg.Connection.cursor) asyncpg.Connection.prepare = _wrap_connection_method(asyncpg.Connection.prepare) asyncpg.connect_utils._connect_addr = _wrap_connect_addr( asyncpg.connect_utils._connect_addr ) T = TypeVar("T") def _wrap_execute(f: Callable[..., Awaitable[T]]) -> Callable[..., Awaitable[T]]: async def _inner(*args: Any, **kwargs: Any) -> T: if sentry_sdk.get_client().get_integration(AsyncPGIntegration) is None: return await f(*args, **kwargs) # Avoid recording calls to _execute twice. 
# Calls to Connection.execute with args also call # Connection._execute, which is recorded separately # args[0] = the connection object, args[1] is the query if len(args) > 2: return await f(*args, **kwargs) query = args[1] with record_sql_queries( cursor=None, query=query, params_list=None, paramstyle=None, executemany=False, span_origin=AsyncPGIntegration.origin, ) as span: res = await f(*args, **kwargs) with capture_internal_exceptions(): add_query_source(span) return res return _inner SubCursor = TypeVar("SubCursor", bound=BaseCursor) @contextlib.contextmanager def _record( cursor: SubCursor | None, query: str, params_list: tuple[Any, ...] | None, *, executemany: bool = False, ) -> Iterator[Span]: integration = sentry_sdk.get_client().get_integration(AsyncPGIntegration) if integration is not None and not integration._record_params: params_list = None param_style = "pyformat" if params_list else None with record_sql_queries( cursor=cursor, query=query, params_list=params_list, paramstyle=param_style, executemany=executemany, record_cursor_repr=cursor is not None, span_origin=AsyncPGIntegration.origin, ) as span: yield span def _wrap_connection_method( f: Callable[..., Awaitable[T]], *, executemany: bool = False ) -> Callable[..., Awaitable[T]]: async def _inner(*args: Any, **kwargs: Any) -> T: if sentry_sdk.get_client().get_integration(AsyncPGIntegration) is None: return await f(*args, **kwargs) query = args[1] params_list = args[2] if len(args) > 2 else None with _record(None, query, params_list, executemany=executemany) as span: _set_db_data(span, args[0]) res = await f(*args, **kwargs) return res return _inner def _wrap_cursor_creation(f: Callable[..., T]) -> Callable[..., T]: @ensure_integration_enabled(AsyncPGIntegration, f) def _inner(*args: Any, **kwargs: Any) -> T: # noqa: N807 query = args[1] params_list = args[2] if len(args) > 2 else None with _record( None, query, params_list, executemany=False, ) as span: _set_db_data(span, args[0]) res = f(*args, **kwargs) span.set_data("db.cursor", res) return res return _inner def _wrap_connect_addr(f: Callable[..., Awaitable[T]]) -> Callable[..., Awaitable[T]]: async def _inner(*args: Any, **kwargs: Any) -> T: if sentry_sdk.get_client().get_integration(AsyncPGIntegration) is None: return await f(*args, **kwargs) user = kwargs["params"].user database = kwargs["params"].database with sentry_sdk.start_span( op=OP.DB, name="connect", origin=AsyncPGIntegration.origin, ) as span: span.set_data(SPANDATA.DB_SYSTEM, "postgresql") addr = kwargs.get("addr") if addr: try: span.set_data(SPANDATA.SERVER_ADDRESS, addr[0]) span.set_data(SPANDATA.SERVER_PORT, addr[1]) except IndexError: pass span.set_data(SPANDATA.DB_NAME, database) span.set_data(SPANDATA.DB_USER, user) with capture_internal_exceptions(): sentry_sdk.add_breadcrumb( message="connect", category="query", data=span._data ) res = await f(*args, **kwargs) return res return _inner def _set_db_data(span: Span, conn: Any) -> None: span.set_data(SPANDATA.DB_SYSTEM, "postgresql") addr = conn._addr if addr: try: span.set_data(SPANDATA.SERVER_ADDRESS, addr[0]) span.set_data(SPANDATA.SERVER_PORT, addr[1]) except IndexError: pass database = conn._params.database if database: span.set_data(SPANDATA.DB_NAME, database) user = conn._params.user if user: span.set_data(SPANDATA.DB_USER, user) sentry-python-2.18.0/sentry_sdk/integrations/atexit.py000066400000000000000000000031641471214654000232200ustar00rootroot00000000000000import os import sys import atexit import sentry_sdk from sentry_sdk.utils import 
logger from sentry_sdk.integrations import Integration from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Optional def default_callback(pending, timeout): # type: (int, int) -> None """This is the default shutdown callback that is set on the options. It prints out a message to stderr that informs the user that some events are still pending and the process is waiting for them to flush out. """ def echo(msg): # type: (str) -> None sys.stderr.write(msg + "\n") echo("Sentry is attempting to send %i pending events" % pending) echo("Waiting up to %s seconds" % timeout) echo("Press Ctrl-%s to quit" % (os.name == "nt" and "Break" or "C")) sys.stderr.flush() class AtexitIntegration(Integration): identifier = "atexit" def __init__(self, callback=None): # type: (Optional[Any]) -> None if callback is None: callback = default_callback self.callback = callback @staticmethod def setup_once(): # type: () -> None @atexit.register def _shutdown(): # type: () -> None client = sentry_sdk.get_client() integration = client.get_integration(AtexitIntegration) if integration is None: return logger.debug("atexit: got shutdown signal") logger.debug("atexit: shutting down client") sentry_sdk.get_isolation_scope().end_session() client.close(callback=integration.callback) sentry-python-2.18.0/sentry_sdk/integrations/aws_lambda.py000066400000000000000000000426521471214654000240210ustar00rootroot00000000000000import functools import json import re import sys from copy import deepcopy from datetime import datetime, timedelta, timezone from os import environ import sentry_sdk from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT from sentry_sdk.utils import ( AnnotatedValue, capture_internal_exceptions, ensure_integration_enabled, event_from_exception, logger, TimeoutThread, reraise, ) from sentry_sdk.integrations import Integration from sentry_sdk.integrations._wsgi_common import _filter_headers from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import TypeVar from typing import Callable from typing import Optional from sentry_sdk._types import EventProcessor, Event, Hint F = TypeVar("F", bound=Callable[..., Any]) # Constants TIMEOUT_WARNING_BUFFER = 1500 # Buffer time required to send timeout warning to Sentry MILLIS_TO_SECONDS = 1000.0 def _wrap_init_error(init_error): # type: (F) -> F @ensure_integration_enabled(AwsLambdaIntegration, init_error) def sentry_init_error(*args, **kwargs): # type: (*Any, **Any) -> Any client = sentry_sdk.get_client() with capture_internal_exceptions(): sentry_sdk.get_isolation_scope().clear_breadcrumbs() exc_info = sys.exc_info() if exc_info and all(exc_info): sentry_event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "aws_lambda", "handled": False}, ) sentry_sdk.capture_event(sentry_event, hint=hint) else: # Fall back to AWS lambdas JSON representation of the error sentry_event = _event_from_error_json(json.loads(args[1])) sentry_sdk.capture_event(sentry_event) return init_error(*args, **kwargs) return sentry_init_error # type: ignore def _wrap_handler(handler): # type: (F) -> F @functools.wraps(handler) def sentry_handler(aws_event, aws_context, *args, **kwargs): # type: (Any, Any, *Any, **Any) -> Any # Per https://docs.aws.amazon.com/lambda/latest/dg/python-handler.html, # `event` here is *likely* a dictionary, but also might be a number 
of # other types (str, int, float, None). # # In some cases, it is a list (if the user is batch-invoking their # function, for example), in which case we'll use the first entry as a # representative from which to try pulling request data. (Presumably it # will be the same for all events in the list, since they're all hitting # the lambda in the same request.) client = sentry_sdk.get_client() integration = client.get_integration(AwsLambdaIntegration) if integration is None: return handler(aws_event, aws_context, *args, **kwargs) if isinstance(aws_event, list) and len(aws_event) >= 1: request_data = aws_event[0] batch_size = len(aws_event) else: request_data = aws_event batch_size = 1 if not isinstance(request_data, dict): # If we're not dealing with a dictionary, we won't be able to get # headers, path, http method, etc in any case, so it's fine that # this is empty request_data = {} configured_time = aws_context.get_remaining_time_in_millis() with sentry_sdk.isolation_scope() as scope: timeout_thread = None with capture_internal_exceptions(): scope.clear_breadcrumbs() scope.add_event_processor( _make_request_event_processor( request_data, aws_context, configured_time ) ) scope.set_tag( "aws_region", aws_context.invoked_function_arn.split(":")[3] ) if batch_size > 1: scope.set_tag("batch_request", True) scope.set_tag("batch_size", batch_size) # Starting the Timeout thread only if the configured time is greater than Timeout warning # buffer and timeout_warning parameter is set True. if ( integration.timeout_warning and configured_time > TIMEOUT_WARNING_BUFFER ): waiting_time = ( configured_time - TIMEOUT_WARNING_BUFFER ) / MILLIS_TO_SECONDS timeout_thread = TimeoutThread( waiting_time, configured_time / MILLIS_TO_SECONDS, ) # Starting the thread to raise timeout warning exception timeout_thread.start() headers = request_data.get("headers", {}) # Some AWS Services (ie. EventBridge) set headers as a list # or None, so we must ensure it is a dict if not isinstance(headers, dict): headers = {} transaction = continue_trace( headers, op=OP.FUNCTION_AWS, name=aws_context.function_name, source=TRANSACTION_SOURCE_COMPONENT, origin=AwsLambdaIntegration.origin, ) with sentry_sdk.start_transaction( transaction, custom_sampling_context={ "aws_event": aws_event, "aws_context": aws_context, }, ): try: return handler(aws_event, aws_context, *args, **kwargs) except Exception: exc_info = sys.exc_info() sentry_event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "aws_lambda", "handled": False}, ) sentry_sdk.capture_event(sentry_event, hint=hint) reraise(*exc_info) finally: if timeout_thread: timeout_thread.stop() return sentry_handler # type: ignore def _drain_queue(): # type: () -> None with capture_internal_exceptions(): client = sentry_sdk.get_client() integration = client.get_integration(AwsLambdaIntegration) if integration is not None: # Flush out the event queue before AWS kills the # process. 
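            # A rough sketch of the failure mode this guards against
            # (hypothetical handler, not part of this module): without an
            # explicit flush, an event captured right before returning may
            # still sit in the transport queue when AWS freezes the sandbox.
            #
            #     def handler(event, context):
            #         sentry_sdk.capture_message("about to return")
            #         return "ok"  # event could be lost without the flush below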
client.flush() class AwsLambdaIntegration(Integration): identifier = "aws_lambda" origin = f"auto.function.{identifier}" def __init__(self, timeout_warning=False): # type: (bool) -> None self.timeout_warning = timeout_warning @staticmethod def setup_once(): # type: () -> None lambda_bootstrap = get_lambda_bootstrap() if not lambda_bootstrap: logger.warning( "Not running in AWS Lambda environment, " "AwsLambdaIntegration disabled (could not find bootstrap module)" ) return if not hasattr(lambda_bootstrap, "handle_event_request"): logger.warning( "Not running in AWS Lambda environment, " "AwsLambdaIntegration disabled (could not find handle_event_request)" ) return pre_37 = hasattr(lambda_bootstrap, "handle_http_request") # Python 3.6 if pre_37: old_handle_event_request = lambda_bootstrap.handle_event_request def sentry_handle_event_request(request_handler, *args, **kwargs): # type: (Any, *Any, **Any) -> Any request_handler = _wrap_handler(request_handler) return old_handle_event_request(request_handler, *args, **kwargs) lambda_bootstrap.handle_event_request = sentry_handle_event_request old_handle_http_request = lambda_bootstrap.handle_http_request def sentry_handle_http_request(request_handler, *args, **kwargs): # type: (Any, *Any, **Any) -> Any request_handler = _wrap_handler(request_handler) return old_handle_http_request(request_handler, *args, **kwargs) lambda_bootstrap.handle_http_request = sentry_handle_http_request # Patch to_json to drain the queue. This should work even when the # SDK is initialized inside of the handler old_to_json = lambda_bootstrap.to_json def sentry_to_json(*args, **kwargs): # type: (*Any, **Any) -> Any _drain_queue() return old_to_json(*args, **kwargs) lambda_bootstrap.to_json = sentry_to_json else: lambda_bootstrap.LambdaRuntimeClient.post_init_error = _wrap_init_error( lambda_bootstrap.LambdaRuntimeClient.post_init_error ) old_handle_event_request = lambda_bootstrap.handle_event_request def sentry_handle_event_request( # type: ignore lambda_runtime_client, request_handler, *args, **kwargs ): request_handler = _wrap_handler(request_handler) return old_handle_event_request( lambda_runtime_client, request_handler, *args, **kwargs ) lambda_bootstrap.handle_event_request = sentry_handle_event_request # Patch the runtime client to drain the queue. 
This should work # even when the SDK is initialized inside of the handler def _wrap_post_function(f): # type: (F) -> F def inner(*args, **kwargs): # type: (*Any, **Any) -> Any _drain_queue() return f(*args, **kwargs) return inner # type: ignore lambda_bootstrap.LambdaRuntimeClient.post_invocation_result = ( _wrap_post_function( lambda_bootstrap.LambdaRuntimeClient.post_invocation_result ) ) lambda_bootstrap.LambdaRuntimeClient.post_invocation_error = ( _wrap_post_function( lambda_bootstrap.LambdaRuntimeClient.post_invocation_error ) ) def get_lambda_bootstrap(): # type: () -> Optional[Any] # Python 3.7: If the bootstrap module is *already imported*, it is the # one we actually want to use (no idea what's in __main__) # # Python 3.8: bootstrap is also importable, but will be the same file # as __main__ imported under a different name: # # sys.modules['__main__'].__file__ == sys.modules['bootstrap'].__file__ # sys.modules['__main__'] is not sys.modules['bootstrap'] # # Python 3.9: bootstrap is in __main__.awslambdaricmain # # On container builds using the `aws-lambda-python-runtime-interface-client` # (awslamdaric) module, bootstrap is located in sys.modules['__main__'].bootstrap # # Such a setup would then make all monkeypatches useless. if "bootstrap" in sys.modules: return sys.modules["bootstrap"] elif "__main__" in sys.modules: module = sys.modules["__main__"] # python3.9 runtime if hasattr(module, "awslambdaricmain") and hasattr( module.awslambdaricmain, "bootstrap" ): return module.awslambdaricmain.bootstrap elif hasattr(module, "bootstrap"): # awslambdaric python module in container builds return module.bootstrap # python3.8 runtime return module else: return None def _make_request_event_processor(aws_event, aws_context, configured_timeout): # type: (Any, Any, Any) -> EventProcessor start_time = datetime.now(timezone.utc) def event_processor(sentry_event, hint, start_time=start_time): # type: (Event, Hint, datetime) -> Optional[Event] remaining_time_in_milis = aws_context.get_remaining_time_in_millis() exec_duration = configured_timeout - remaining_time_in_milis extra = sentry_event.setdefault("extra", {}) extra["lambda"] = { "function_name": aws_context.function_name, "function_version": aws_context.function_version, "invoked_function_arn": aws_context.invoked_function_arn, "aws_request_id": aws_context.aws_request_id, "execution_duration_in_millis": exec_duration, "remaining_time_in_millis": remaining_time_in_milis, } extra["cloudwatch logs"] = { "url": _get_cloudwatch_logs_url(aws_context, start_time), "log_group": aws_context.log_group_name, "log_stream": aws_context.log_stream_name, } request = sentry_event.get("request", {}) if "httpMethod" in aws_event: request["method"] = aws_event["httpMethod"] request["url"] = _get_url(aws_event, aws_context) if "queryStringParameters" in aws_event: request["query_string"] = aws_event["queryStringParameters"] if "headers" in aws_event: request["headers"] = _filter_headers(aws_event["headers"]) if should_send_default_pii(): user_info = sentry_event.setdefault("user", {}) identity = aws_event.get("identity") if identity is None: identity = {} id = identity.get("userArn") if id is not None: user_info.setdefault("id", id) ip = identity.get("sourceIp") if ip is not None: user_info.setdefault("ip_address", ip) if "body" in aws_event: request["data"] = aws_event.get("body", "") else: if aws_event.get("body", None): # Unfortunately couldn't find a way to get structured body from AWS # event. Meaning every body is unstructured to us. 
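                # `removed_because_raw_data()` substitutes an annotated
                # placeholder, so the event still records that a body existed
                # without shipping the raw bytes themselves.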
request["data"] = AnnotatedValue.removed_because_raw_data() sentry_event["request"] = deepcopy(request) return sentry_event return event_processor def _get_url(aws_event, aws_context): # type: (Any, Any) -> str path = aws_event.get("path", None) headers = aws_event.get("headers") if headers is None: headers = {} host = headers.get("Host", None) proto = headers.get("X-Forwarded-Proto", None) if proto and host and path: return "{}://{}{}".format(proto, host, path) return "awslambda:///{}".format(aws_context.function_name) def _get_cloudwatch_logs_url(aws_context, start_time): # type: (Any, datetime) -> str """ Generates a CloudWatchLogs console URL based on the context object Arguments: aws_context {Any} -- context from lambda handler Returns: str -- AWS Console URL to logs. """ formatstring = "%Y-%m-%dT%H:%M:%SZ" region = environ.get("AWS_REGION", "") url = ( "https://console.{domain}/cloudwatch/home?region={region}" "#logEventViewer:group={log_group};stream={log_stream}" ";start={start_time};end={end_time}" ).format( domain="amazonaws.cn" if region.startswith("cn-") else "aws.amazon.com", region=region, log_group=aws_context.log_group_name, log_stream=aws_context.log_stream_name, start_time=(start_time - timedelta(seconds=1)).strftime(formatstring), end_time=(datetime.now(timezone.utc) + timedelta(seconds=2)).strftime( formatstring ), ) return url def _parse_formatted_traceback(formatted_tb): # type: (list[str]) -> list[dict[str, Any]] frames = [] for frame in formatted_tb: match = re.match(r'File "(.+)", line (\d+), in (.+)', frame.strip()) if match: file_name, line_number, func_name = match.groups() line_number = int(line_number) frames.append( { "filename": file_name, "function": func_name, "lineno": line_number, "vars": None, "pre_context": None, "context_line": None, "post_context": None, } ) return frames def _event_from_error_json(error_json): # type: (dict[str, Any]) -> Event """ Converts the error JSON from AWS Lambda into a Sentry error event. This is not a full fletched event, but better than nothing. 
This is an example of where AWS creates the error JSON: https://github.com/aws/aws-lambda-python-runtime-interface-client/blob/2.2.1/awslambdaric/bootstrap.py#L479 """ event = { "level": "error", "exception": { "values": [ { "type": error_json.get("errorType"), "value": error_json.get("errorMessage"), "stacktrace": { "frames": _parse_formatted_traceback( error_json.get("stackTrace", []) ), }, "mechanism": { "type": "aws_lambda", "handled": False, }, } ], }, } # type: Event return event sentry-python-2.18.0/sentry_sdk/integrations/beam.py000066400000000000000000000120761471214654000226300ustar00rootroot00000000000000import sys import types from functools import wraps import sentry_sdk from sentry_sdk.integrations import Integration from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, event_from_exception, reraise, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Iterator from typing import TypeVar from typing import Callable from sentry_sdk._types import ExcInfo T = TypeVar("T") F = TypeVar("F", bound=Callable[..., Any]) WRAPPED_FUNC = "_wrapped_{}_" INSPECT_FUNC = "_inspect_{}" # Required format per apache_beam/transforms/core.py USED_FUNC = "_sentry_used_" class BeamIntegration(Integration): identifier = "beam" @staticmethod def setup_once(): # type: () -> None from apache_beam.transforms.core import DoFn, ParDo # type: ignore ignore_logger("root") ignore_logger("bundle_processor.create") function_patches = ["process", "start_bundle", "finish_bundle", "setup"] for func_name in function_patches: setattr( DoFn, INSPECT_FUNC.format(func_name), _wrap_inspect_call(DoFn, func_name), ) old_init = ParDo.__init__ def sentry_init_pardo(self, fn, *args, **kwargs): # type: (ParDo, Any, *Any, **Any) -> Any # Do not monkey patch init twice if not getattr(self, "_sentry_is_patched", False): for func_name in function_patches: if not hasattr(fn, func_name): continue wrapped_func = WRAPPED_FUNC.format(func_name) # Check to see if inspect is set and process is not # to avoid monkey patching process twice. # Check to see if function is part of object for # backwards compatibility. process_func = getattr(fn, func_name) inspect_func = getattr(fn, INSPECT_FUNC.format(func_name)) if not getattr(inspect_func, USED_FUNC, False) and not getattr( process_func, USED_FUNC, False ): setattr(fn, wrapped_func, process_func) setattr(fn, func_name, _wrap_task_call(process_func)) self._sentry_is_patched = True old_init(self, fn, *args, **kwargs) ParDo.__init__ = sentry_init_pardo def _wrap_inspect_call(cls, func_name): # type: (Any, Any) -> Any if not hasattr(cls, func_name): return None def _inspect(self): # type: (Any) -> Any """ Inspect function overrides the way Beam gets argspec. """ wrapped_func = WRAPPED_FUNC.format(func_name) if hasattr(self, wrapped_func): process_func = getattr(self, wrapped_func) else: process_func = getattr(self, func_name) setattr(self, func_name, _wrap_task_call(process_func)) setattr(self, wrapped_func, process_func) # getfullargspec is deprecated in more recent beam versions and get_function_args_defaults # (which uses Signatures internally) should be used instead. 
try: from apache_beam.transforms.core import get_function_args_defaults return get_function_args_defaults(process_func) except ImportError: from apache_beam.typehints.decorators import getfullargspec # type: ignore return getfullargspec(process_func) setattr(_inspect, USED_FUNC, True) return _inspect def _wrap_task_call(func): # type: (F) -> F """ Wrap task call with a try catch to get exceptions. """ @wraps(func) def _inner(*args, **kwargs): # type: (*Any, **Any) -> Any try: gen = func(*args, **kwargs) except Exception: raise_exception() if not isinstance(gen, types.GeneratorType): return gen return _wrap_generator_call(gen) setattr(_inner, USED_FUNC, True) return _inner # type: ignore @ensure_integration_enabled(BeamIntegration) def _capture_exception(exc_info): # type: (ExcInfo) -> None """ Send Beam exception to Sentry. """ client = sentry_sdk.get_client() event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "beam", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) def raise_exception(): # type: () -> None """ Raise an exception. """ exc_info = sys.exc_info() with capture_internal_exceptions(): _capture_exception(exc_info) reraise(*exc_info) def _wrap_generator_call(gen): # type: (Iterator[T]) -> Iterator[T] """ Wrap the generator to handle any failures. """ while True: try: yield next(gen) except StopIteration: break except Exception: raise_exception() sentry-python-2.18.0/sentry_sdk/integrations/boto3.py000066400000000000000000000107311471214654000227460ustar00rootroot00000000000000from functools import partial import sentry_sdk from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.tracing import Span from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, parse_url, parse_version, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Dict from typing import Optional from typing import Type try: from botocore import __version__ as BOTOCORE_VERSION # type: ignore from botocore.client import BaseClient # type: ignore from botocore.response import StreamingBody # type: ignore from botocore.awsrequest import AWSRequest # type: ignore except ImportError: raise DidNotEnable("botocore is not installed") class Boto3Integration(Integration): identifier = "boto3" origin = f"auto.http.{identifier}" @staticmethod def setup_once(): # type: () -> None version = parse_version(BOTOCORE_VERSION) if version is None: raise DidNotEnable( "Unparsable botocore version: {}".format(BOTOCORE_VERSION) ) if version < (1, 12): raise DidNotEnable("Botocore 1.12 or newer is required.") orig_init = BaseClient.__init__ def sentry_patched_init(self, *args, **kwargs): # type: (Type[BaseClient], *Any, **Any) -> None orig_init(self, *args, **kwargs) meta = self.meta service_id = meta.service_model.service_id.hyphenize() meta.events.register( "request-created", partial(_sentry_request_created, service_id=service_id), ) meta.events.register("after-call", _sentry_after_call) meta.events.register("after-call-error", _sentry_after_call_error) BaseClient.__init__ = sentry_patched_init @ensure_integration_enabled(Boto3Integration) def _sentry_request_created(service_id, request, operation_name, **kwargs): # type: (str, AWSRequest, str, **Any) -> None description = "aws.%s.%s" % (service_id, operation_name) span = sentry_sdk.start_span( op=OP.HTTP_CLIENT, name=description, origin=Boto3Integration.origin, ) with 
capture_internal_exceptions(): parsed_url = parse_url(request.url, sanitize=False) span.set_data("aws.request.url", parsed_url.url) span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) span.set_tag("aws.service_id", service_id) span.set_tag("aws.operation_name", operation_name) span.set_data(SPANDATA.HTTP_METHOD, request.method) # We do it in order for subsequent http calls/retries be # attached to this span. span.__enter__() # request.context is an open-ended data-structure # where we can add anything useful in request life cycle. request.context["_sentrysdk_span"] = span def _sentry_after_call(context, parsed, **kwargs): # type: (Dict[str, Any], Dict[str, Any], **Any) -> None span = context.pop("_sentrysdk_span", None) # type: Optional[Span] # Span could be absent if the integration is disabled. if span is None: return span.__exit__(None, None, None) body = parsed.get("Body") if not isinstance(body, StreamingBody): return streaming_span = span.start_child( op=OP.HTTP_CLIENT_STREAM, name=span.description, origin=Boto3Integration.origin, ) orig_read = body.read orig_close = body.close def sentry_streaming_body_read(*args, **kwargs): # type: (*Any, **Any) -> bytes try: ret = orig_read(*args, **kwargs) if not ret: streaming_span.finish() return ret except Exception: streaming_span.finish() raise body.read = sentry_streaming_body_read def sentry_streaming_body_close(*args, **kwargs): # type: (*Any, **Any) -> None streaming_span.finish() orig_close(*args, **kwargs) body.close = sentry_streaming_body_close def _sentry_after_call_error(context, exception, **kwargs): # type: (Dict[str, Any], Type[BaseException], **Any) -> None span = context.pop("_sentrysdk_span", None) # type: Optional[Span] # Span could be absent if the integration is disabled. if span is None: return span.__exit__(type(exception), exception, None) sentry-python-2.18.0/sentry_sdk/integrations/bottle.py000066400000000000000000000147061471214654000232170ustar00rootroot00000000000000import functools import sentry_sdk from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, event_from_exception, parse_version, transaction_from_function, ) from sentry_sdk.integrations import ( Integration, DidNotEnable, _DEFAULT_FAILED_REQUEST_STATUS_CODES, ) from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware from sentry_sdk.integrations._wsgi_common import RequestExtractor from typing import TYPE_CHECKING if TYPE_CHECKING: from collections.abc import Set from sentry_sdk.integrations.wsgi import _ScopedResponse from typing import Any from typing import Dict from typing import Callable from typing import Optional from bottle import FileUpload, FormsDict, LocalRequest # type: ignore from sentry_sdk._types import EventProcessor, Event try: from bottle import ( Bottle, HTTPResponse, Route, request as bottle_request, __version__ as BOTTLE_VERSION, ) except ImportError: raise DidNotEnable("Bottle not installed") TRANSACTION_STYLE_VALUES = ("endpoint", "url") class BottleIntegration(Integration): identifier = "bottle" origin = f"auto.http.{identifier}" transaction_style = "" def __init__( self, transaction_style="endpoint", # type: str *, failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES, # type: Set[int] ): # type: (...) 
-> None if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" % (transaction_style, TRANSACTION_STYLE_VALUES) ) self.transaction_style = transaction_style self.failed_request_status_codes = failed_request_status_codes @staticmethod def setup_once(): # type: () -> None version = parse_version(BOTTLE_VERSION) if version is None: raise DidNotEnable("Unparsable Bottle version: {}".format(BOTTLE_VERSION)) if version < (0, 12): raise DidNotEnable("Bottle 0.12 or newer required.") old_app = Bottle.__call__ @ensure_integration_enabled(BottleIntegration, old_app) def sentry_patched_wsgi_app(self, environ, start_response): # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse middleware = SentryWsgiMiddleware( lambda *a, **kw: old_app(self, *a, **kw), span_origin=BottleIntegration.origin, ) return middleware(environ, start_response) Bottle.__call__ = sentry_patched_wsgi_app old_handle = Bottle._handle @functools.wraps(old_handle) def _patched_handle(self, environ): # type: (Bottle, Dict[str, Any]) -> Any integration = sentry_sdk.get_client().get_integration(BottleIntegration) if integration is None: return old_handle(self, environ) scope = sentry_sdk.get_isolation_scope() scope._name = "bottle" scope.add_event_processor( _make_request_event_processor(self, bottle_request, integration) ) res = old_handle(self, environ) return res Bottle._handle = _patched_handle old_make_callback = Route._make_callback @functools.wraps(old_make_callback) def patched_make_callback(self, *args, **kwargs): # type: (Route, *object, **object) -> Any prepared_callback = old_make_callback(self, *args, **kwargs) integration = sentry_sdk.get_client().get_integration(BottleIntegration) if integration is None: return prepared_callback def wrapped_callback(*args, **kwargs): # type: (*object, **object) -> Any try: res = prepared_callback(*args, **kwargs) except Exception as exception: _capture_exception(exception, handled=False) raise exception if ( isinstance(res, HTTPResponse) and res.status_code in integration.failed_request_status_codes ): _capture_exception(res, handled=True) return res return wrapped_callback Route._make_callback = patched_make_callback class BottleRequestExtractor(RequestExtractor): def env(self): # type: () -> Dict[str, str] return self.request.environ def cookies(self): # type: () -> Dict[str, str] return self.request.cookies def raw_data(self): # type: () -> bytes return self.request.body.read() def form(self): # type: () -> FormsDict if self.is_json(): return None return self.request.forms.decode() def files(self): # type: () -> Optional[Dict[str, str]] if self.is_json(): return None return self.request.files def size_of_file(self, file): # type: (FileUpload) -> int return file.content_length def _set_transaction_name_and_source(event, transaction_style, request): # type: (Event, str, Any) -> None name = "" if transaction_style == "url": name = request.route.rule or "" elif transaction_style == "endpoint": name = ( request.route.name or transaction_from_function(request.route.callback) or "" ) event["transaction"] = name event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]} def _make_request_event_processor(app, request, integration): # type: (Bottle, LocalRequest, BottleIntegration) -> EventProcessor def event_processor(event, hint): # type: (Event, dict[str, Any]) -> Event _set_transaction_name_and_source(event, integration.transaction_style, request) with capture_internal_exceptions(): 
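            # Extraction reads cookies, form data and files from the request;
            # the guard above keeps any failure in it from breaking the app.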
            BottleRequestExtractor(request).extract_into_event(event)

        return event

    return event_processor


def _capture_exception(exception, handled):
    # type: (BaseException, bool) -> None
    event, hint = event_from_exception(
        exception,
        client_options=sentry_sdk.get_client().options,
        mechanism={"type": "bottle", "handled": handled},
    )
    sentry_sdk.capture_event(event, hint=hint)
sentry-python-2.18.0/sentry_sdk/integrations/celery/000077500000000000000000000000001471214654000226275ustar00rootroot00000000000000sentry-python-2.18.0/sentry_sdk/integrations/celery/__init__.py000066400000000000000000000443511471214654000247470ustar00rootroot00000000000000import sys
from collections.abc import Mapping
from functools import wraps

import sentry_sdk
from sentry_sdk import isolation_scope
from sentry_sdk.api import continue_trace
from sentry_sdk.consts import OP, SPANSTATUS, SPANDATA
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.integrations.celery.beat import (
    _patch_beat_apply_entry,
    _patch_redbeat_maybe_due,
    _setup_celery_beat_signals,
)
from sentry_sdk.integrations.celery.utils import _now_seconds_since_epoch
from sentry_sdk.integrations.logging import ignore_logger
from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, TRANSACTION_SOURCE_TASK
from sentry_sdk.tracing_utils import Baggage
from sentry_sdk.utils import (
    capture_internal_exceptions,
    ensure_integration_enabled,
    event_from_exception,
    reraise,
)

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Any
    from typing import Callable
    from typing import List
    from typing import Optional
    from typing import TypeVar
    from typing import Union

    from sentry_sdk._types import EventProcessor, Event, Hint, ExcInfo
    from sentry_sdk.tracing import Span

    F = TypeVar("F", bound=Callable[..., Any])


try:
    from celery import VERSION as CELERY_VERSION  # type: ignore
    from celery.app.task import Task  # type: ignore
    from celery.app.trace import task_has_custom
    from celery.exceptions import (  # type: ignore
        Ignore,
        Reject,
        Retry,
        SoftTimeLimitExceeded,
    )
    from kombu import Producer  # type: ignore
except ImportError:
    raise DidNotEnable("Celery not installed")


CELERY_CONTROL_FLOW_EXCEPTIONS = (Retry, Ignore, Reject)


class CeleryIntegration(Integration):
    identifier = "celery"
    origin = f"auto.queue.{identifier}"

    def __init__(
        self,
        propagate_traces=True,
        monitor_beat_tasks=False,
        exclude_beat_tasks=None,
    ):
        # type: (bool, bool, Optional[List[str]]) -> None
        self.propagate_traces = propagate_traces
        self.monitor_beat_tasks = monitor_beat_tasks
        self.exclude_beat_tasks = exclude_beat_tasks

        _patch_beat_apply_entry()
        _patch_redbeat_maybe_due()
        _setup_celery_beat_signals(monitor_beat_tasks)

    @staticmethod
    def setup_once():
        # type: () -> None
        if CELERY_VERSION < (4, 4, 7):
            raise DidNotEnable("Celery 4.4.7 or newer required.")

        _patch_build_tracer()
        _patch_task_apply_async()
        _patch_celery_send_task()
        _patch_worker_exit()
        _patch_producer_publish()

        # This logger logs every status of every task that ran on the worker.
        # Meaning that every task's breadcrumbs are full of stuff like "Task
        # <task_name> raised unexpected <exception>".
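        # (A hypothetical example of such a log line:
        # "Task tasks.add[some-task-id] raised unexpected: KeyError('x')".)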
ignore_logger("celery.worker.job") ignore_logger("celery.app.trace") # This is stdout/err redirected to a logger, can't deal with this # (need event_level=logging.WARN to reproduce) ignore_logger("celery.redirected") def _set_status(status): # type: (str) -> None with capture_internal_exceptions(): scope = sentry_sdk.get_current_scope() if scope.span is not None: scope.span.set_status(status) def _capture_exception(task, exc_info): # type: (Any, ExcInfo) -> None client = sentry_sdk.get_client() if client.get_integration(CeleryIntegration) is None: return if isinstance(exc_info[1], CELERY_CONTROL_FLOW_EXCEPTIONS): # ??? Doesn't map to anything _set_status("aborted") return _set_status("internal_error") if hasattr(task, "throws") and isinstance(exc_info[1], task.throws): return event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "celery", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) def _make_event_processor(task, uuid, args, kwargs, request=None): # type: (Any, Any, Any, Any, Optional[Any]) -> EventProcessor def event_processor(event, hint): # type: (Event, Hint) -> Optional[Event] with capture_internal_exceptions(): tags = event.setdefault("tags", {}) tags["celery_task_id"] = uuid extra = event.setdefault("extra", {}) extra["celery-job"] = { "task_name": task.name, "args": args, "kwargs": kwargs, } if "exc_info" in hint: with capture_internal_exceptions(): if issubclass(hint["exc_info"][0], SoftTimeLimitExceeded): event["fingerprint"] = [ "celery", "SoftTimeLimitExceeded", getattr(task, "name", task), ] return event return event_processor def _update_celery_task_headers(original_headers, span, monitor_beat_tasks): # type: (dict[str, Any], Optional[Span], bool) -> dict[str, Any] """ Updates the headers of the Celery task with the tracing information and eventually Sentry Crons monitoring information for beat tasks. """ updated_headers = original_headers.copy() with capture_internal_exceptions(): # if span is None (when the task was started by Celery Beat) # this will return the trace headers from the scope. headers = dict( sentry_sdk.get_isolation_scope().iter_trace_propagation_headers(span=span) ) if monitor_beat_tasks: headers.update( { "sentry-monitor-start-timestamp-s": "%.9f" % _now_seconds_since_epoch(), } ) # Add the time the task was enqueued to the headers # This is used in the consumer to calculate the latency updated_headers.update( {"sentry-task-enqueued-time": _now_seconds_since_epoch()} ) if headers: existing_baggage = updated_headers.get(BAGGAGE_HEADER_NAME) sentry_baggage = headers.get(BAGGAGE_HEADER_NAME) combined_baggage = sentry_baggage or existing_baggage if sentry_baggage and existing_baggage: # Merge incoming and sentry baggage, where the sentry trace information # in the incoming baggage takes precedence and the third-party items # are concatenated. 
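                # A sketch of the merge with hypothetical values:
                #   existing: "other-vendor=foo,sentry-trace_id=111"
                #   sentry:   "sentry-trace_id=222,sentry-environment=prod"
                #   merged:   "sentry-trace_id=111,sentry-environment=prod,other-vendor=foo"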
incoming = Baggage.from_incoming_header(existing_baggage) combined = Baggage.from_incoming_header(sentry_baggage) combined.sentry_items.update(incoming.sentry_items) combined.third_party_items = ",".join( [ x for x in [ combined.third_party_items, incoming.third_party_items, ] if x is not None and x != "" ] ) combined_baggage = combined.serialize(include_third_party=True) updated_headers.update(headers) if combined_baggage: updated_headers[BAGGAGE_HEADER_NAME] = combined_baggage # https://github.com/celery/celery/issues/4875 # # Need to setdefault the inner headers too since other # tracing tools (dd-trace-py) also employ this exact # workaround and we don't want to break them. updated_headers.setdefault("headers", {}).update(headers) if combined_baggage: updated_headers["headers"][BAGGAGE_HEADER_NAME] = combined_baggage # Add the Sentry options potentially added in `sentry_apply_entry` # to the headers (done when auto-instrumenting Celery Beat tasks) for key, value in updated_headers.items(): if key.startswith("sentry-"): updated_headers["headers"][key] = value return updated_headers class NoOpMgr: def __enter__(self): # type: () -> None return None def __exit__(self, exc_type, exc_value, traceback): # type: (Any, Any, Any) -> None return None def _wrap_task_run(f): # type: (F) -> F @wraps(f) def apply_async(*args, **kwargs): # type: (*Any, **Any) -> Any # Note: kwargs can contain headers=None, so no setdefault! # Unsure which backend though. integration = sentry_sdk.get_client().get_integration(CeleryIntegration) if integration is None: return f(*args, **kwargs) kwarg_headers = kwargs.get("headers") or {} propagate_traces = kwarg_headers.pop( "sentry-propagate-traces", integration.propagate_traces ) if not propagate_traces: return f(*args, **kwargs) if isinstance(args[0], Task): task_name = args[0].name # type: str elif len(args) > 1 and isinstance(args[1], str): task_name = args[1] else: task_name = "" task_started_from_beat = sentry_sdk.get_isolation_scope()._name == "celery-beat" span_mgr = ( sentry_sdk.start_span( op=OP.QUEUE_SUBMIT_CELERY, name=task_name, origin=CeleryIntegration.origin, ) if not task_started_from_beat else NoOpMgr() ) # type: Union[Span, NoOpMgr] with span_mgr as span: kwargs["headers"] = _update_celery_task_headers( kwarg_headers, span, integration.monitor_beat_tasks ) return f(*args, **kwargs) return apply_async # type: ignore def _wrap_tracer(task, f): # type: (Any, F) -> F # Need to wrap tracer for pushing the scope before prerun is sent, and # popping it after postrun is sent. # # This is the reason we don't use signals for hooking in the first place. # Also because in Celery 3, signal dispatch returns early if one handler # crashes. @wraps(f) @ensure_integration_enabled(CeleryIntegration, f) def _inner(*args, **kwargs): # type: (*Any, **Any) -> Any with isolation_scope() as scope: scope._name = "celery" scope.clear_breadcrumbs() scope.add_event_processor(_make_event_processor(task, *args, **kwargs)) transaction = None # Celery task objects are not a thing to be trusted. Even # something such as attribute access can fail. 
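            # (For instance, a task class overriding __getattr__ could raise
            # here; a hypothetical but representative failure mode.)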
with capture_internal_exceptions(): headers = args[3].get("headers") or {} transaction = continue_trace( headers, op=OP.QUEUE_TASK_CELERY, name="unknown celery task", source=TRANSACTION_SOURCE_TASK, origin=CeleryIntegration.origin, ) transaction.name = task.name transaction.set_status(SPANSTATUS.OK) if transaction is None: return f(*args, **kwargs) with sentry_sdk.start_transaction( transaction, custom_sampling_context={ "celery_job": { "task": task.name, # for some reason, args[1] is a list if non-empty but a # tuple if empty "args": list(args[1]), "kwargs": args[2], } }, ): return f(*args, **kwargs) return _inner # type: ignore def _set_messaging_destination_name(task, span): # type: (Any, Span) -> None """Set "messaging.destination.name" tag for span""" with capture_internal_exceptions(): delivery_info = task.request.delivery_info if delivery_info: routing_key = delivery_info.get("routing_key") if delivery_info.get("exchange") == "" and routing_key is not None: # Empty exchange indicates the default exchange, meaning the tasks # are sent to the queue with the same name as the routing key. span.set_data(SPANDATA.MESSAGING_DESTINATION_NAME, routing_key) def _wrap_task_call(task, f): # type: (Any, F) -> F # Need to wrap task call because the exception is caught before we get to # see it. Also celery's reported stacktrace is untrustworthy. # functools.wraps is important here because celery-once looks at this # method's name. @ensure_integration_enabled internally calls functools.wraps, # but if we ever remove the @ensure_integration_enabled decorator, we need # to add @functools.wraps(f) here. # https://github.com/getsentry/sentry-python/issues/421 @ensure_integration_enabled(CeleryIntegration, f) def _inner(*args, **kwargs): # type: (*Any, **Any) -> Any try: with sentry_sdk.start_span( op=OP.QUEUE_PROCESS, name=task.name, origin=CeleryIntegration.origin, ) as span: _set_messaging_destination_name(task, span) latency = None with capture_internal_exceptions(): if ( task.request.headers is not None and "sentry-task-enqueued-time" in task.request.headers ): latency = _now_seconds_since_epoch() - task.request.headers.pop( "sentry-task-enqueued-time" ) if latency is not None: span.set_data(SPANDATA.MESSAGING_MESSAGE_RECEIVE_LATENCY, latency) with capture_internal_exceptions(): span.set_data(SPANDATA.MESSAGING_MESSAGE_ID, task.request.id) with capture_internal_exceptions(): span.set_data( SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, task.request.retries ) with capture_internal_exceptions(): span.set_data( SPANDATA.MESSAGING_SYSTEM, task.app.connection().transport.driver_type, ) return f(*args, **kwargs) except Exception: exc_info = sys.exc_info() with capture_internal_exceptions(): _capture_exception(task, exc_info) reraise(*exc_info) return _inner # type: ignore def _patch_build_tracer(): # type: () -> None import celery.app.trace as trace # type: ignore original_build_tracer = trace.build_tracer def sentry_build_tracer(name, task, *args, **kwargs): # type: (Any, Any, *Any, **Any) -> Any if not getattr(task, "_sentry_is_patched", False): # determine whether Celery will use __call__ or run and patch # accordingly if task_has_custom(task, "__call__"): type(task).__call__ = _wrap_task_call(task, type(task).__call__) else: task.run = _wrap_task_call(task, task.run) # `build_tracer` is apparently called for every task # invocation. Can't wrap every celery task for every invocation # or we will get infinitely nested wrapper functions. 
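            # The flag below makes the patching idempotent; it is checked by
            # the `getattr` guard at the top of this function on every call.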
task._sentry_is_patched = True return _wrap_tracer(task, original_build_tracer(name, task, *args, **kwargs)) trace.build_tracer = sentry_build_tracer def _patch_task_apply_async(): # type: () -> None Task.apply_async = _wrap_task_run(Task.apply_async) def _patch_celery_send_task(): # type: () -> None from celery import Celery Celery.send_task = _wrap_task_run(Celery.send_task) def _patch_worker_exit(): # type: () -> None # Need to flush queue before worker shutdown because a crashing worker will # call os._exit from billiard.pool import Worker # type: ignore original_workloop = Worker.workloop def sentry_workloop(*args, **kwargs): # type: (*Any, **Any) -> Any try: return original_workloop(*args, **kwargs) finally: with capture_internal_exceptions(): if ( sentry_sdk.get_client().get_integration(CeleryIntegration) is not None ): sentry_sdk.flush() Worker.workloop = sentry_workloop def _patch_producer_publish(): # type: () -> None original_publish = Producer.publish @ensure_integration_enabled(CeleryIntegration, original_publish) def sentry_publish(self, *args, **kwargs): # type: (Producer, *Any, **Any) -> Any kwargs_headers = kwargs.get("headers", {}) if not isinstance(kwargs_headers, Mapping): # Ensure kwargs_headers is a Mapping, so we can safely call get(). # We don't expect this to happen, but it's better to be safe. Even # if it does happen, only our instrumentation breaks. This line # does not overwrite kwargs["headers"], so the original publish # method will still work. kwargs_headers = {} task_name = kwargs_headers.get("task") task_id = kwargs_headers.get("id") retries = kwargs_headers.get("retries") routing_key = kwargs.get("routing_key") exchange = kwargs.get("exchange") with sentry_sdk.start_span( op=OP.QUEUE_PUBLISH, name=task_name, origin=CeleryIntegration.origin, ) as span: if task_id is not None: span.set_data(SPANDATA.MESSAGING_MESSAGE_ID, task_id) if exchange == "" and routing_key is not None: # Empty exchange indicates the default exchange, meaning messages are # routed to the queue with the same name as the routing key. 
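                # e.g. publishing with exchange="" and routing_key="celery"
                # targets the default "celery" queue (hypothetical values).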
span.set_data(SPANDATA.MESSAGING_DESTINATION_NAME, routing_key) if retries is not None: span.set_data(SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, retries) with capture_internal_exceptions(): span.set_data( SPANDATA.MESSAGING_SYSTEM, self.connection.transport.driver_type ) return original_publish(self, *args, **kwargs) Producer.publish = sentry_publish sentry-python-2.18.0/sentry_sdk/integrations/celery/beat.py000066400000000000000000000213631471214654000241210ustar00rootroot00000000000000import sentry_sdk from sentry_sdk.crons import capture_checkin, MonitorStatus from sentry_sdk.integrations import DidNotEnable from sentry_sdk.integrations.celery.utils import ( _get_humanized_interval, _now_seconds_since_epoch, ) from sentry_sdk.utils import ( logger, match_regex_list, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from collections.abc import Callable from typing import Any, Optional, TypeVar, Union from sentry_sdk._types import ( MonitorConfig, MonitorConfigScheduleType, MonitorConfigScheduleUnit, ) F = TypeVar("F", bound=Callable[..., Any]) try: from celery import Task, Celery # type: ignore from celery.beat import Scheduler # type: ignore from celery.schedules import crontab, schedule # type: ignore from celery.signals import ( # type: ignore task_failure, task_success, task_retry, ) except ImportError: raise DidNotEnable("Celery not installed") try: from redbeat.schedulers import RedBeatScheduler # type: ignore except ImportError: RedBeatScheduler = None def _get_headers(task): # type: (Task) -> dict[str, Any] headers = task.request.get("headers") or {} # flatten nested headers if "headers" in headers: headers.update(headers["headers"]) del headers["headers"] headers.update(task.request.get("properties") or {}) return headers def _get_monitor_config(celery_schedule, app, monitor_name): # type: (Any, Celery, str) -> MonitorConfig monitor_config = {} # type: MonitorConfig schedule_type = None # type: Optional[MonitorConfigScheduleType] schedule_value = None # type: Optional[Union[str, int]] schedule_unit = None # type: Optional[MonitorConfigScheduleUnit] if isinstance(celery_schedule, crontab): schedule_type = "crontab" schedule_value = ( "{0._orig_minute} " "{0._orig_hour} " "{0._orig_day_of_month} " "{0._orig_month_of_year} " "{0._orig_day_of_week}".format(celery_schedule) ) elif isinstance(celery_schedule, schedule): schedule_type = "interval" (schedule_value, schedule_unit) = _get_humanized_interval( celery_schedule.seconds ) if schedule_unit == "second": logger.warning( "Intervals shorter than one minute are not supported by Sentry Crons. Monitor '%s' has an interval of %s seconds. Use the `exclude_beat_tasks` option in the celery integration to exclude it.", monitor_name, schedule_value, ) return {} else: logger.warning( "Celery schedule type '%s' not supported by Sentry Crons.", type(celery_schedule), ) return {} monitor_config["schedule"] = {} monitor_config["schedule"]["type"] = schedule_type monitor_config["schedule"]["value"] = schedule_value if schedule_unit is not None: monitor_config["schedule"]["unit"] = schedule_unit monitor_config["timezone"] = ( ( hasattr(celery_schedule, "tz") and celery_schedule.tz is not None and str(celery_schedule.tz) ) or app.timezone or "UTC" ) return monitor_config def _apply_crons_data_to_schedule_entry(scheduler, schedule_entry, integration): # type: (Any, Any, sentry_sdk.integrations.celery.CeleryIntegration) -> None """ Add Sentry Crons information to the schedule_entry headers. 
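
    Concretely (see the body below), this sets the "sentry-monitor-slug",
    "sentry-monitor-config" and "sentry-monitor-check-in-id" headers on the
    schedule entry's options and emits an in-progress check-in.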
""" if not integration.monitor_beat_tasks: return monitor_name = schedule_entry.name task_should_be_excluded = match_regex_list( monitor_name, integration.exclude_beat_tasks ) if task_should_be_excluded: return celery_schedule = schedule_entry.schedule app = scheduler.app monitor_config = _get_monitor_config(celery_schedule, app, monitor_name) is_supported_schedule = bool(monitor_config) if not is_supported_schedule: return headers = schedule_entry.options.pop("headers", {}) headers.update( { "sentry-monitor-slug": monitor_name, "sentry-monitor-config": monitor_config, } ) check_in_id = capture_checkin( monitor_slug=monitor_name, monitor_config=monitor_config, status=MonitorStatus.IN_PROGRESS, ) headers.update({"sentry-monitor-check-in-id": check_in_id}) # Set the Sentry configuration in the options of the ScheduleEntry. # Those will be picked up in `apply_async` and added to the headers. schedule_entry.options["headers"] = headers def _wrap_beat_scheduler(original_function): # type: (Callable[..., Any]) -> Callable[..., Any] """ Makes sure that: - a new Sentry trace is started for each task started by Celery Beat and it is propagated to the task. - the Sentry Crons information is set in the Celery Beat task's headers so that is is monitored with Sentry Crons. After the patched function is called, Celery Beat will call apply_async to put the task in the queue. """ # Patch only once # Can't use __name__ here, because some of our tests mock original_apply_entry already_patched = "sentry_patched_scheduler" in str(original_function) if already_patched: return original_function from sentry_sdk.integrations.celery import CeleryIntegration def sentry_patched_scheduler(*args, **kwargs): # type: (*Any, **Any) -> None integration = sentry_sdk.get_client().get_integration(CeleryIntegration) if integration is None: return original_function(*args, **kwargs) # Tasks started by Celery Beat start a new Trace scope = sentry_sdk.get_isolation_scope() scope.set_new_propagation_context() scope._name = "celery-beat" scheduler, schedule_entry = args _apply_crons_data_to_schedule_entry(scheduler, schedule_entry, integration) return original_function(*args, **kwargs) return sentry_patched_scheduler def _patch_beat_apply_entry(): # type: () -> None Scheduler.apply_entry = _wrap_beat_scheduler(Scheduler.apply_entry) def _patch_redbeat_maybe_due(): # type: () -> None if RedBeatScheduler is None: return RedBeatScheduler.maybe_due = _wrap_beat_scheduler(RedBeatScheduler.maybe_due) def _setup_celery_beat_signals(monitor_beat_tasks): # type: (bool) -> None if monitor_beat_tasks: task_success.connect(crons_task_success) task_failure.connect(crons_task_failure) task_retry.connect(crons_task_retry) def crons_task_success(sender, **kwargs): # type: (Task, dict[Any, Any]) -> None logger.debug("celery_task_success %s", sender) headers = _get_headers(sender) if "sentry-monitor-slug" not in headers: return monitor_config = headers.get("sentry-monitor-config", {}) start_timestamp_s = headers.get("sentry-monitor-start-timestamp-s") capture_checkin( monitor_slug=headers["sentry-monitor-slug"], monitor_config=monitor_config, check_in_id=headers["sentry-monitor-check-in-id"], duration=( _now_seconds_since_epoch() - float(start_timestamp_s) if start_timestamp_s else None ), status=MonitorStatus.OK, ) def crons_task_failure(sender, **kwargs): # type: (Task, dict[Any, Any]) -> None logger.debug("celery_task_failure %s", sender) headers = _get_headers(sender) if "sentry-monitor-slug" not in headers: return monitor_config = 
headers.get("sentry-monitor-config", {}) start_timestamp_s = headers.get("sentry-monitor-start-timestamp-s") capture_checkin( monitor_slug=headers["sentry-monitor-slug"], monitor_config=monitor_config, check_in_id=headers["sentry-monitor-check-in-id"], duration=( _now_seconds_since_epoch() - float(start_timestamp_s) if start_timestamp_s else None ), status=MonitorStatus.ERROR, ) def crons_task_retry(sender, **kwargs): # type: (Task, dict[Any, Any]) -> None logger.debug("celery_task_retry %s", sender) headers = _get_headers(sender) if "sentry-monitor-slug" not in headers: return monitor_config = headers.get("sentry-monitor-config", {}) start_timestamp_s = headers.get("sentry-monitor-start-timestamp-s") capture_checkin( monitor_slug=headers["sentry-monitor-slug"], monitor_config=monitor_config, check_in_id=headers["sentry-monitor-check-in-id"], duration=( _now_seconds_since_epoch() - float(start_timestamp_s) if start_timestamp_s else None ), status=MonitorStatus.ERROR, ) sentry-python-2.18.0/sentry_sdk/integrations/celery/utils.py000066400000000000000000000022701471214654000243420ustar00rootroot00000000000000import time from typing import TYPE_CHECKING, cast if TYPE_CHECKING: from typing import Any, Tuple from sentry_sdk._types import MonitorConfigScheduleUnit def _now_seconds_since_epoch(): # type: () -> float # We cannot use `time.perf_counter()` when dealing with the duration # of a Celery task, because the start of a Celery task and # the end are recorded in different processes. # Start happens in the Celery Beat process, # the end in a Celery Worker process. return time.time() def _get_humanized_interval(seconds): # type: (float) -> Tuple[int, MonitorConfigScheduleUnit] TIME_UNITS = ( # noqa: N806 ("day", 60 * 60 * 24.0), ("hour", 60 * 60.0), ("minute", 60.0), ) seconds = float(seconds) for unit, divider in TIME_UNITS: if seconds >= divider: interval = int(seconds / divider) return (interval, cast("MonitorConfigScheduleUnit", unit)) return (int(seconds), "second") class NoOpMgr: def __enter__(self): # type: () -> None return None def __exit__(self, exc_type, exc_value, traceback): # type: (Any, Any, Any) -> None return None sentry-python-2.18.0/sentry_sdk/integrations/chalice.py000066400000000000000000000111471471214654000233120ustar00rootroot00000000000000import sys from functools import wraps import sentry_sdk from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations.aws_lambda import _make_request_event_processor from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, parse_version, reraise, ) try: import chalice # type: ignore from chalice import __version__ as CHALICE_VERSION from chalice import Chalice, ChaliceViewError from chalice.app import EventSourceHandler as ChaliceEventSourceHandler # type: ignore except ImportError: raise DidNotEnable("Chalice is not installed") from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Dict from typing import TypeVar from typing import Callable F = TypeVar("F", bound=Callable[..., Any]) class EventSourceHandler(ChaliceEventSourceHandler): # type: ignore def __call__(self, event, context): # type: (Any, Any) -> Any client = sentry_sdk.get_client() with sentry_sdk.isolation_scope() as scope: with capture_internal_exceptions(): configured_time = context.get_remaining_time_in_millis() scope.add_event_processor( _make_request_event_processor(event, context, configured_time) ) try: return 
ChaliceEventSourceHandler.__call__(self, event, context) except Exception: exc_info = sys.exc_info() event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "chalice", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) client.flush() reraise(*exc_info) def _get_view_function_response(app, view_function, function_args): # type: (Any, F, Any) -> F @wraps(view_function) def wrapped_view_function(**function_args): # type: (**Any) -> Any client = sentry_sdk.get_client() with sentry_sdk.isolation_scope() as scope: with capture_internal_exceptions(): configured_time = app.lambda_context.get_remaining_time_in_millis() scope.set_transaction_name( app.lambda_context.function_name, source=TRANSACTION_SOURCE_COMPONENT, ) scope.add_event_processor( _make_request_event_processor( app.current_request.to_dict(), app.lambda_context, configured_time, ) ) try: return view_function(**function_args) except Exception as exc: if isinstance(exc, ChaliceViewError): raise exc_info = sys.exc_info() event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "chalice", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) client.flush() raise return wrapped_view_function # type: ignore class ChaliceIntegration(Integration): identifier = "chalice" @staticmethod def setup_once(): # type: () -> None version = parse_version(CHALICE_VERSION) if version is None: raise DidNotEnable("Unparsable Chalice version: {}".format(CHALICE_VERSION)) if version < (1, 20): old_get_view_function_response = Chalice._get_view_function_response else: from chalice.app import RestAPIEventHandler old_get_view_function_response = ( RestAPIEventHandler._get_view_function_response ) def sentry_event_response(app, view_function, function_args): # type: (Any, F, Dict[str, Any]) -> Any wrapped_view_function = _get_view_function_response( app, view_function, function_args ) return old_get_view_function_response( app, wrapped_view_function, function_args ) if version < (1, 20): Chalice._get_view_function_response = sentry_event_response else: RestAPIEventHandler._get_view_function_response = sentry_event_response # for everything else (like events) chalice.app.EventSourceHandler = EventSourceHandler sentry-python-2.18.0/sentry_sdk/integrations/clickhouse_driver.py000066400000000000000000000121701471214654000254230ustar00rootroot00000000000000import sentry_sdk from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.tracing import Span from sentry_sdk.scope import should_send_default_pii from sentry_sdk.utils import capture_internal_exceptions, ensure_integration_enabled from typing import TYPE_CHECKING, TypeVar # Hack to get new Python features working in older versions # without introducing a hard dependency on `typing_extensions` # from: https://stackoverflow.com/a/71944042/300572 if TYPE_CHECKING: from typing import ParamSpec, Callable else: # Fake ParamSpec class ParamSpec: def __init__(self, _): self.args = None self.kwargs = None # Callable[anything] will return None class _Callable: def __getitem__(self, _): return None # Make instances Callable = _Callable() try: import clickhouse_driver # type: ignore[import-not-found] except ImportError: raise DidNotEnable("clickhouse-driver not installed.") if clickhouse_driver.VERSION < (0, 2, 0): raise DidNotEnable("clickhouse-driver >= 0.2.0 required") class ClickhouseDriverIntegration(Integration): identifier = "clickhouse_driver" origin = 
f"auto.db.{identifier}" @staticmethod def setup_once() -> None: # Every query is done using the Connection's `send_query` function clickhouse_driver.connection.Connection.send_query = _wrap_start( clickhouse_driver.connection.Connection.send_query ) # If the query contains parameters then the send_data function is used to send those parameters to clickhouse clickhouse_driver.client.Client.send_data = _wrap_send_data( clickhouse_driver.client.Client.send_data ) # Every query ends either with the Client's `receive_end_of_query` (no result expected) # or its `receive_result` (result expected) clickhouse_driver.client.Client.receive_end_of_query = _wrap_end( clickhouse_driver.client.Client.receive_end_of_query ) if hasattr(clickhouse_driver.client.Client, "receive_end_of_insert_query"): # In 0.2.7, insert queries are handled separately via `receive_end_of_insert_query` clickhouse_driver.client.Client.receive_end_of_insert_query = _wrap_end( clickhouse_driver.client.Client.receive_end_of_insert_query ) clickhouse_driver.client.Client.receive_result = _wrap_end( clickhouse_driver.client.Client.receive_result ) P = ParamSpec("P") T = TypeVar("T") def _wrap_start(f: Callable[P, T]) -> Callable[P, T]: @ensure_integration_enabled(ClickhouseDriverIntegration, f) def _inner(*args: P.args, **kwargs: P.kwargs) -> T: connection = args[0] query = args[1] query_id = args[2] if len(args) > 2 else kwargs.get("query_id") params = args[3] if len(args) > 3 else kwargs.get("params") span = sentry_sdk.start_span( op=OP.DB, name=query, origin=ClickhouseDriverIntegration.origin, ) connection._sentry_span = span # type: ignore[attr-defined] _set_db_data(span, connection) span.set_data("query", query) if query_id: span.set_data("db.query_id", query_id) if params and should_send_default_pii(): span.set_data("db.params", params) # run the original code ret = f(*args, **kwargs) return ret return _inner def _wrap_end(f: Callable[P, T]) -> Callable[P, T]: def _inner_end(*args: P.args, **kwargs: P.kwargs) -> T: res = f(*args, **kwargs) instance = args[0] span = getattr(instance.connection, "_sentry_span", None) # type: ignore[attr-defined] if span is not None: if res is not None and should_send_default_pii(): span.set_data("db.result", res) with capture_internal_exceptions(): span.scope.add_breadcrumb( message=span._data.pop("query"), category="query", data=span._data ) span.finish() return res return _inner_end def _wrap_send_data(f: Callable[P, T]) -> Callable[P, T]: def _inner_send_data(*args: P.args, **kwargs: P.kwargs) -> T: instance = args[0] # type: clickhouse_driver.client.Client data = args[2] span = getattr(instance.connection, "_sentry_span", None) if span is not None: _set_db_data(span, instance.connection) if should_send_default_pii(): db_params = span._data.get("db.params", []) db_params.extend(data) span.set_data("db.params", db_params) return f(*args, **kwargs) return _inner_send_data def _set_db_data( span: Span, connection: clickhouse_driver.connection.Connection ) -> None: span.set_data(SPANDATA.DB_SYSTEM, "clickhouse") span.set_data(SPANDATA.SERVER_ADDRESS, connection.host) span.set_data(SPANDATA.SERVER_PORT, connection.port) span.set_data(SPANDATA.DB_NAME, connection.database) span.set_data(SPANDATA.DB_USER, connection.user) sentry-python-2.18.0/sentry_sdk/integrations/cloud_resource_context.py000066400000000000000000000151301471214654000264770ustar00rootroot00000000000000import json import urllib3 from sentry_sdk.integrations import Integration from sentry_sdk.api import set_context from 
sentry_sdk.utils import logger from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Dict CONTEXT_TYPE = "cloud_resource" AWS_METADATA_HOST = "169.254.169.254" AWS_TOKEN_URL = "http://{}/latest/api/token".format(AWS_METADATA_HOST) AWS_METADATA_URL = "http://{}/latest/dynamic/instance-identity/document".format( AWS_METADATA_HOST ) GCP_METADATA_HOST = "metadata.google.internal" GCP_METADATA_URL = "http://{}/computeMetadata/v1/?recursive=true".format( GCP_METADATA_HOST ) class CLOUD_PROVIDER: # noqa: N801 """ Name of the cloud provider. see https://opentelemetry.io/docs/reference/specification/resource/semantic_conventions/cloud/ """ ALIBABA = "alibaba_cloud" AWS = "aws" AZURE = "azure" GCP = "gcp" IBM = "ibm_cloud" TENCENT = "tencent_cloud" class CLOUD_PLATFORM: # noqa: N801 """ The cloud platform. see https://opentelemetry.io/docs/reference/specification/resource/semantic_conventions/cloud/ """ AWS_EC2 = "aws_ec2" GCP_COMPUTE_ENGINE = "gcp_compute_engine" class CloudResourceContextIntegration(Integration): """ Adds cloud resource context to the Sentry scope """ identifier = "cloudresourcecontext" cloud_provider = "" aws_token = "" http = urllib3.PoolManager() gcp_metadata = None def __init__(self, cloud_provider=""): # type: (str) -> None CloudResourceContextIntegration.cloud_provider = cloud_provider @classmethod def _is_aws(cls): # type: () -> bool try: r = cls.http.request( "PUT", AWS_TOKEN_URL, headers={"X-aws-ec2-metadata-token-ttl-seconds": "60"}, ) if r.status != 200: return False cls.aws_token = r.data.decode() return True except Exception: return False @classmethod def _get_aws_context(cls): # type: () -> Dict[str, str] ctx = { "cloud.provider": CLOUD_PROVIDER.AWS, "cloud.platform": CLOUD_PLATFORM.AWS_EC2, } try: r = cls.http.request( "GET", AWS_METADATA_URL, headers={"X-aws-ec2-metadata-token": cls.aws_token}, ) if r.status != 200: return ctx data = json.loads(r.data.decode("utf-8")) try: ctx["cloud.account.id"] = data["accountId"] except Exception: pass try: ctx["cloud.availability_zone"] = data["availabilityZone"] except Exception: pass try: ctx["cloud.region"] = data["region"] except Exception: pass try: ctx["host.id"] = data["instanceId"] except Exception: pass try: ctx["host.type"] = data["instanceType"] except Exception: pass except Exception: pass return ctx @classmethod def _is_gcp(cls): # type: () -> bool try: r = cls.http.request( "GET", GCP_METADATA_URL, headers={"Metadata-Flavor": "Google"}, ) if r.status != 200: return False cls.gcp_metadata = json.loads(r.data.decode("utf-8")) return True except Exception: return False @classmethod def _get_gcp_context(cls): # type: () -> Dict[str, str] ctx = { "cloud.provider": CLOUD_PROVIDER.GCP, "cloud.platform": CLOUD_PLATFORM.GCP_COMPUTE_ENGINE, } try: if cls.gcp_metadata is None: r = cls.http.request( "GET", GCP_METADATA_URL, headers={"Metadata-Flavor": "Google"}, ) if r.status != 200: return ctx cls.gcp_metadata = json.loads(r.data.decode("utf-8")) try: ctx["cloud.account.id"] = cls.gcp_metadata["project"]["projectId"] except Exception: pass try: ctx["cloud.availability_zone"] = cls.gcp_metadata["instance"][ "zone" ].split("/")[-1] except Exception: pass try: # only populated in google cloud run ctx["cloud.region"] = cls.gcp_metadata["instance"]["region"].split("/")[ -1 ] except Exception: pass try: ctx["host.id"] = cls.gcp_metadata["instance"]["id"] except Exception: pass except Exception: pass return ctx @classmethod def _get_cloud_provider(cls): # type: () -> str if cls._is_aws(): return CLOUD_PROVIDER.AWS if 
cls._is_gcp(): return CLOUD_PROVIDER.GCP return "" @classmethod def _get_cloud_resource_context(cls): # type: () -> Dict[str, str] cloud_provider = ( cls.cloud_provider if cls.cloud_provider != "" else CloudResourceContextIntegration._get_cloud_provider() ) if cloud_provider in context_getters.keys(): return context_getters[cloud_provider]() return {} @staticmethod def setup_once(): # type: () -> None cloud_provider = CloudResourceContextIntegration.cloud_provider unsupported_cloud_provider = ( cloud_provider != "" and cloud_provider not in context_getters.keys() ) if unsupported_cloud_provider: logger.warning( "Invalid value for cloud_provider: %s (must be in %s). Falling back to autodetection...", CloudResourceContextIntegration.cloud_provider, list(context_getters.keys()), ) context = CloudResourceContextIntegration._get_cloud_resource_context() if context != {}: set_context(CONTEXT_TYPE, context) # Map with the currently supported cloud providers # mapping to functions extracting the context context_getters = { CLOUD_PROVIDER.AWS: CloudResourceContextIntegration._get_aws_context, CLOUD_PROVIDER.GCP: CloudResourceContextIntegration._get_gcp_context, } sentry-python-2.18.0/sentry_sdk/integrations/cohere.py000066400000000000000000000220621471214654000231650ustar00rootroot00000000000000from functools import wraps from sentry_sdk import consts from sentry_sdk.ai.monitoring import record_token_usage from sentry_sdk.consts import SPANDATA from sentry_sdk.ai.utils import set_data_normalized from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any, Callable, Iterator from sentry_sdk.tracing import Span import sentry_sdk from sentry_sdk.scope import should_send_default_pii from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.utils import capture_internal_exceptions, event_from_exception try: from cohere.client import Client from cohere.base_client import BaseCohere from cohere import ( ChatStreamEndEvent, NonStreamedChatResponse, ) if TYPE_CHECKING: from cohere import StreamedChatResponse except ImportError: raise DidNotEnable("Cohere not installed") try: # cohere 5.9.3+ from cohere import StreamEndStreamedChatResponse except ImportError: from cohere import StreamedChatResponse_StreamEnd as StreamEndStreamedChatResponse COLLECTED_CHAT_PARAMS = { "model": SPANDATA.AI_MODEL_ID, "k": SPANDATA.AI_TOP_K, "p": SPANDATA.AI_TOP_P, "seed": SPANDATA.AI_SEED, "frequency_penalty": SPANDATA.AI_FREQUENCY_PENALTY, "presence_penalty": SPANDATA.AI_PRESENCE_PENALTY, "raw_prompting": SPANDATA.AI_RAW_PROMPTING, } COLLECTED_PII_CHAT_PARAMS = { "tools": SPANDATA.AI_TOOLS, "preamble": SPANDATA.AI_PREAMBLE, } COLLECTED_CHAT_RESP_ATTRS = { "generation_id": "ai.generation_id", "is_search_required": "ai.is_search_required", "finish_reason": "ai.finish_reason", } COLLECTED_PII_CHAT_RESP_ATTRS = { "citations": "ai.citations", "documents": "ai.documents", "search_queries": "ai.search_queries", "search_results": "ai.search_results", "tool_calls": "ai.tool_calls", } class CohereIntegration(Integration): identifier = "cohere" origin = f"auto.ai.{identifier}" def __init__(self, include_prompts=True): # type: (CohereIntegration, bool) -> None self.include_prompts = include_prompts @staticmethod def setup_once(): # type: () -> None BaseCohere.chat = _wrap_chat(BaseCohere.chat, streaming=False) Client.embed = _wrap_embed(Client.embed) BaseCohere.chat_stream = _wrap_chat(BaseCohere.chat_stream, streaming=True) def _capture_exception(exc): # type: (Any) -> None event, hint = 
event_from_exception( exc, client_options=sentry_sdk.get_client().options, mechanism={"type": "cohere", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) def _wrap_chat(f, streaming): # type: (Callable[..., Any], bool) -> Callable[..., Any] def collect_chat_response_fields(span, res, include_pii): # type: (Span, NonStreamedChatResponse, bool) -> None if include_pii: if hasattr(res, "text"): set_data_normalized( span, SPANDATA.AI_RESPONSES, [res.text], ) for pii_attr in COLLECTED_PII_CHAT_RESP_ATTRS: if hasattr(res, pii_attr): set_data_normalized(span, "ai." + pii_attr, getattr(res, pii_attr)) for attr in COLLECTED_CHAT_RESP_ATTRS: if hasattr(res, attr): set_data_normalized(span, "ai." + attr, getattr(res, attr)) if hasattr(res, "meta"): if hasattr(res.meta, "billed_units"): record_token_usage( span, prompt_tokens=res.meta.billed_units.input_tokens, completion_tokens=res.meta.billed_units.output_tokens, ) elif hasattr(res.meta, "tokens"): record_token_usage( span, prompt_tokens=res.meta.tokens.input_tokens, completion_tokens=res.meta.tokens.output_tokens, ) if hasattr(res.meta, "warnings"): set_data_normalized(span, "ai.warnings", res.meta.warnings) @wraps(f) def new_chat(*args, **kwargs): # type: (*Any, **Any) -> Any integration = sentry_sdk.get_client().get_integration(CohereIntegration) if ( integration is None or "message" not in kwargs or not isinstance(kwargs.get("message"), str) ): return f(*args, **kwargs) message = kwargs.get("message") span = sentry_sdk.start_span( op=consts.OP.COHERE_CHAT_COMPLETIONS_CREATE, name="cohere.client.Chat", origin=CohereIntegration.origin, ) span.__enter__() try: res = f(*args, **kwargs) except Exception as e: _capture_exception(e) span.__exit__(None, None, None) raise e from None with capture_internal_exceptions(): if should_send_default_pii() and integration.include_prompts: set_data_normalized( span, SPANDATA.AI_INPUT_MESSAGES, list( map( lambda x: { "role": getattr(x, "role", "").lower(), "content": getattr(x, "message", ""), }, kwargs.get("chat_history", []), ) ) + [{"role": "user", "content": message}], ) for k, v in COLLECTED_PII_CHAT_PARAMS.items(): if k in kwargs: set_data_normalized(span, v, kwargs[k]) for k, v in COLLECTED_CHAT_PARAMS.items(): if k in kwargs: set_data_normalized(span, v, kwargs[k]) set_data_normalized(span, SPANDATA.AI_STREAMING, False) if streaming: old_iterator = res def new_iterator(): # type: () -> Iterator[StreamedChatResponse] with capture_internal_exceptions(): for x in old_iterator: if isinstance(x, ChatStreamEndEvent) or isinstance( x, StreamEndStreamedChatResponse ): collect_chat_response_fields( span, x.response, include_pii=should_send_default_pii() and integration.include_prompts, ) yield x span.__exit__(None, None, None) return new_iterator() elif isinstance(res, NonStreamedChatResponse): collect_chat_response_fields( span, res, include_pii=should_send_default_pii() and integration.include_prompts, ) span.__exit__(None, None, None) else: set_data_normalized(span, "unknown_response", True) span.__exit__(None, None, None) return res return new_chat def _wrap_embed(f): # type: (Callable[..., Any]) -> Callable[..., Any] @wraps(f) def new_embed(*args, **kwargs): # type: (*Any, **Any) -> Any integration = sentry_sdk.get_client().get_integration(CohereIntegration) if integration is None: return f(*args, **kwargs) with sentry_sdk.start_span( op=consts.OP.COHERE_EMBEDDINGS_CREATE, name="Cohere Embedding Creation", origin=CohereIntegration.origin, ) as span: if "texts" in kwargs and ( 
should_send_default_pii() and integration.include_prompts ): if isinstance(kwargs["texts"], str): set_data_normalized(span, "ai.texts", [kwargs["texts"]]) elif ( isinstance(kwargs["texts"], list) and len(kwargs["texts"]) > 0 and isinstance(kwargs["texts"][0], str) ): set_data_normalized( span, SPANDATA.AI_INPUT_MESSAGES, kwargs["texts"] ) if "model" in kwargs: set_data_normalized(span, SPANDATA.AI_MODEL_ID, kwargs["model"]) try: res = f(*args, **kwargs) except Exception as e: _capture_exception(e) raise e from None if ( hasattr(res, "meta") and hasattr(res.meta, "billed_units") and hasattr(res.meta.billed_units, "input_tokens") ): record_token_usage( span, prompt_tokens=res.meta.billed_units.input_tokens, total_tokens=res.meta.billed_units.input_tokens, ) return res return new_embed sentry-python-2.18.0/sentry_sdk/integrations/dedupe.py000066400000000000000000000022231471214654000231630ustar00rootroot00000000000000import sentry_sdk from sentry_sdk.utils import ContextVar from sentry_sdk.integrations import Integration from sentry_sdk.scope import add_global_event_processor from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Optional from sentry_sdk._types import Event, Hint class DedupeIntegration(Integration): identifier = "dedupe" def __init__(self): # type: () -> None self._last_seen = ContextVar("last-seen") @staticmethod def setup_once(): # type: () -> None @add_global_event_processor def processor(event, hint): # type: (Event, Optional[Hint]) -> Optional[Event] if hint is None: return event integration = sentry_sdk.get_client().get_integration(DedupeIntegration) if integration is None: return event exc_info = hint.get("exc_info", None) if exc_info is None: return event exc = exc_info[1] if integration._last_seen.get(None) is exc: return None integration._last_seen.set(exc) return event sentry-python-2.18.0/sentry_sdk/integrations/django/000077500000000000000000000000001471214654000226065ustar00rootroot00000000000000sentry-python-2.18.0/sentry_sdk/integrations/django/__init__.py000066400000000000000000000606371471214654000247330ustar00rootroot00000000000000import inspect import sys import threading import weakref from importlib import import_module import sentry_sdk from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.scope import add_global_event_processor, should_send_default_pii from sentry_sdk.serializer import add_global_repr_processor from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_URL from sentry_sdk.tracing_utils import add_query_source, record_sql_queries from sentry_sdk.utils import ( AnnotatedValue, HAS_REAL_CONTEXTVARS, CONTEXTVARS_ERROR_MESSAGE, SENSITIVE_DATA_SUBSTITUTE, logger, capture_internal_exceptions, ensure_integration_enabled, event_from_exception, transaction_from_function, walk_exception_chain, ) from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware from sentry_sdk.integrations._wsgi_common import ( DEFAULT_HTTP_METHODS_TO_CAPTURE, RequestExtractor, ) try: from django import VERSION as DJANGO_VERSION from django.conf import settings as django_settings from django.core import signals from django.conf import settings try: from django.urls import resolve except ImportError: from django.core.urlresolvers import resolve try: from django.urls import Resolver404 except ImportError: from django.core.urlresolvers import Resolver404 # Only available in Django 3.0+ try: from django.core.handlers.asgi 
import ASGIRequest except Exception: ASGIRequest = None except ImportError: raise DidNotEnable("Django not installed") from sentry_sdk.integrations.django.transactions import LEGACY_RESOLVER from sentry_sdk.integrations.django.templates import ( get_template_frame_from_exception, patch_templates, ) from sentry_sdk.integrations.django.middleware import patch_django_middlewares from sentry_sdk.integrations.django.signals_handlers import patch_signals from sentry_sdk.integrations.django.views import patch_views if DJANGO_VERSION[:2] > (1, 8): from sentry_sdk.integrations.django.caching import patch_caching else: patch_caching = None # type: ignore from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Callable from typing import Dict from typing import Optional from typing import Union from typing import List from django.core.handlers.wsgi import WSGIRequest from django.http.response import HttpResponse from django.http.request import QueryDict from django.utils.datastructures import MultiValueDict from sentry_sdk.tracing import Span from sentry_sdk.integrations.wsgi import _ScopedResponse from sentry_sdk._types import Event, Hint, EventProcessor, NotImplementedType if DJANGO_VERSION < (1, 10): def is_authenticated(request_user): # type: (Any) -> bool return request_user.is_authenticated() else: def is_authenticated(request_user): # type: (Any) -> bool return request_user.is_authenticated TRANSACTION_STYLE_VALUES = ("function_name", "url") class DjangoIntegration(Integration): """ Auto instrument a Django application. :param transaction_style: How to derive transaction names. Either `"function_name"` or `"url"`. Defaults to `"url"`. :param middleware_spans: Whether to create spans for middleware. Defaults to `True`. :param signals_spans: Whether to create spans for signals. Defaults to `True`. :param signals_denylist: A list of signals to ignore when creating spans. :param cache_spans: Whether to create spans for cache operations. Defaults to `False`. """ identifier = "django" origin = f"auto.http.{identifier}" origin_db = f"auto.db.{identifier}" transaction_style = "" middleware_spans = None signals_spans = None cache_spans = None signals_denylist = [] # type: list[signals.Signal] def __init__( self, transaction_style="url", # type: str middleware_spans=True, # type: bool signals_spans=True, # type: bool cache_spans=False, # type: bool signals_denylist=None, # type: Optional[list[signals.Signal]] http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE, # type: tuple[str, ...] ): # type: (...) -> None if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" % (transaction_style, TRANSACTION_STYLE_VALUES) ) self.transaction_style = transaction_style self.middleware_spans = middleware_spans self.signals_spans = signals_spans self.signals_denylist = signals_denylist or [] self.cache_spans = cache_spans self.http_methods_to_capture = tuple(map(str.upper, http_methods_to_capture)) @staticmethod def setup_once(): # type: () -> None if DJANGO_VERSION < (1, 8): raise DidNotEnable("Django 1.8 or newer is required.") install_sql_hook() # Patch in our custom middleware. 
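# (Illustrative only, not executed here: a minimal sketch of how an
# application opts into the options consumed by this integration. The DSN
# is a placeholder, and the keyword arguments shown are the ones documented
# on DjangoIntegration above.)
#
#     import sentry_sdk
#     from sentry_sdk.integrations.django import DjangoIntegration
#
#     sentry_sdk.init(
#         dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
#         integrations=[
#             DjangoIntegration(
#                 transaction_style="url",
#                 middleware_spans=True,
#                 signals_spans=True,
#                 cache_spans=False,
#             ),
#         ],
#     )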
# logs an error for every 500 ignore_logger("django.server") ignore_logger("django.request") from django.core.handlers.wsgi import WSGIHandler old_app = WSGIHandler.__call__ @ensure_integration_enabled(DjangoIntegration, old_app) def sentry_patched_wsgi_handler(self, environ, start_response): # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse bound_old_app = old_app.__get__(self, WSGIHandler) from django.conf import settings use_x_forwarded_for = settings.USE_X_FORWARDED_HOST integration = sentry_sdk.get_client().get_integration(DjangoIntegration) middleware = SentryWsgiMiddleware( bound_old_app, use_x_forwarded_for, span_origin=DjangoIntegration.origin, http_methods_to_capture=( integration.http_methods_to_capture if integration else DEFAULT_HTTP_METHODS_TO_CAPTURE ), ) return middleware(environ, start_response) WSGIHandler.__call__ = sentry_patched_wsgi_handler _patch_get_response() _patch_django_asgi_handler() signals.got_request_exception.connect(_got_request_exception) @add_global_event_processor def process_django_templates(event, hint): # type: (Event, Optional[Hint]) -> Optional[Event] if hint is None: return event exc_info = hint.get("exc_info", None) if exc_info is None: return event exception = event.get("exception", None) if exception is None: return event values = exception.get("values", None) if values is None: return event for exception, (_, exc_value, _) in zip( reversed(values), walk_exception_chain(exc_info) ): frame = get_template_frame_from_exception(exc_value) if frame is not None: frames = exception.get("stacktrace", {}).get("frames", []) for i in reversed(range(len(frames))): f = frames[i] if ( f.get("function") in ("Parser.parse", "parse", "render") and f.get("module") == "django.template.base" ): i += 1 break else: i = len(frames) frames.insert(i, frame) return event @add_global_repr_processor def _django_queryset_repr(value, hint): # type: (Any, Dict[str, Any]) -> Union[NotImplementedType, str] try: # Django 1.6 can fail to import `QuerySet` when Django settings # have not yet been initialized. # # If we fail to import, return `NotImplemented`. It's at least # unlikely that we have a query set in `value` when importing # `QuerySet` fails. from django.db.models.query import QuerySet except Exception: return NotImplemented if not isinstance(value, QuerySet) or value._result_cache: return NotImplemented return "<%s from %s at 0x%x>" % ( value.__class__.__name__, value.__module__, id(value), ) _patch_channels() patch_django_middlewares() patch_views() patch_templates() patch_signals() if patch_caching is not None: patch_caching() _DRF_PATCHED = False _DRF_PATCH_LOCK = threading.Lock() def _patch_drf(): # type: () -> None """ Patch Django Rest Framework for more/better request data. DRF's request type is a wrapper around Django's request type. The attribute we're interested in is `request.data`, which is a cached property containing a parsed request body. Reading a request body from that property is more reliable than reading from any of Django's own properties, as those don't hold payloads in memory and therefore can only be accessed once. We patch the Django request object to include a weak backreference to the DRF request object, such that we can later use either in `DjangoRequestExtractor`. This function is not called directly on SDK setup, because importing almost any part of Django Rest Framework will try to access Django settings (where `sentry_sdk.init()` might be called from in the first place). 
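(As a sketch of what the backref buys us, mirroring what
`DjangoRequestExtractor.__init__` further down in this module does — the
variable names here are illustrative:

    drf_request = django_request._sentry_drf_request_backref()
    if drf_request is not None:
        django_request = drf_request  # `.data` holds the parsed body

)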
Instead we run this function on every request and do the patching on the first request. """ global _DRF_PATCHED if _DRF_PATCHED: # Double-checked locking return with _DRF_PATCH_LOCK: if _DRF_PATCHED: return # We set this regardless of whether the code below succeeds or fails. # There is no point in trying to patch again on the next request. _DRF_PATCHED = True with capture_internal_exceptions(): try: from rest_framework.views import APIView # type: ignore except ImportError: pass else: old_drf_initial = APIView.initial def sentry_patched_drf_initial(self, request, *args, **kwargs): # type: (APIView, Any, *Any, **Any) -> Any with capture_internal_exceptions(): request._request._sentry_drf_request_backref = weakref.ref( request ) pass return old_drf_initial(self, request, *args, **kwargs) APIView.initial = sentry_patched_drf_initial def _patch_channels(): # type: () -> None try: from channels.http import AsgiHandler # type: ignore except ImportError: return if not HAS_REAL_CONTEXTVARS: # We better have contextvars or we're going to leak state between # requests. # # We cannot hard-raise here because channels may not be used at all in # the current process. That is the case when running traditional WSGI # workers in gunicorn+gevent and the websocket stuff in a separate # process. logger.warning( "We detected that you are using Django channels 2.0." + CONTEXTVARS_ERROR_MESSAGE ) from sentry_sdk.integrations.django.asgi import patch_channels_asgi_handler_impl patch_channels_asgi_handler_impl(AsgiHandler) def _patch_django_asgi_handler(): # type: () -> None try: from django.core.handlers.asgi import ASGIHandler except ImportError: return if not HAS_REAL_CONTEXTVARS: # We better have contextvars or we're going to leak state between # requests. # # We cannot hard-raise here because Django's ASGI stuff may not be used # at all. logger.warning( "We detected that you are using Django 3." 
+ CONTEXTVARS_ERROR_MESSAGE ) from sentry_sdk.integrations.django.asgi import patch_django_asgi_handler_impl patch_django_asgi_handler_impl(ASGIHandler) def _set_transaction_name_and_source(scope, transaction_style, request): # type: (sentry_sdk.Scope, str, WSGIRequest) -> None try: transaction_name = None if transaction_style == "function_name": fn = resolve(request.path).func transaction_name = transaction_from_function(getattr(fn, "view_class", fn)) elif transaction_style == "url": if hasattr(request, "urlconf"): transaction_name = LEGACY_RESOLVER.resolve( request.path_info, urlconf=request.urlconf ) else: transaction_name = LEGACY_RESOLVER.resolve(request.path_info) if transaction_name is None: transaction_name = request.path_info source = TRANSACTION_SOURCE_URL else: source = SOURCE_FOR_STYLE[transaction_style] scope.set_transaction_name( transaction_name, source=source, ) except Resolver404: urlconf = import_module(settings.ROOT_URLCONF) # This exception only gets thrown when transaction_style is `function_name` # So we don't check here what style is configured if hasattr(urlconf, "handler404"): handler = urlconf.handler404 if isinstance(handler, str): scope.transaction = handler else: scope.transaction = transaction_from_function( getattr(handler, "view_class", handler) ) except Exception: pass def _before_get_response(request): # type: (WSGIRequest) -> None integration = sentry_sdk.get_client().get_integration(DjangoIntegration) if integration is None: return _patch_drf() scope = sentry_sdk.get_current_scope() # Rely on WSGI middleware to start a trace _set_transaction_name_and_source(scope, integration.transaction_style, request) scope.add_event_processor( _make_wsgi_request_event_processor(weakref.ref(request), integration) ) def _attempt_resolve_again(request, scope, transaction_style): # type: (WSGIRequest, sentry_sdk.Scope, str) -> None """ Some django middlewares overwrite request.urlconf so we need to respect that contract, so we try to resolve the url again. """ if not hasattr(request, "urlconf"): return _set_transaction_name_and_source(scope, transaction_style, request) def _after_get_response(request): # type: (WSGIRequest) -> None integration = sentry_sdk.get_client().get_integration(DjangoIntegration) if integration is None or integration.transaction_style != "url": return scope = sentry_sdk.get_current_scope() _attempt_resolve_again(request, scope, integration.transaction_style) def _patch_get_response(): # type: () -> None """ patch get_response, because at that point we have the Django request object """ from django.core.handlers.base import BaseHandler old_get_response = BaseHandler.get_response def sentry_patched_get_response(self, request): # type: (Any, WSGIRequest) -> Union[HttpResponse, BaseException] _before_get_response(request) rv = old_get_response(self, request) _after_get_response(request) return rv BaseHandler.get_response = sentry_patched_get_response if hasattr(BaseHandler, "get_response_async"): from sentry_sdk.integrations.django.asgi import patch_get_response_async patch_get_response_async(BaseHandler, _before_get_response) def _make_wsgi_request_event_processor(weak_request, integration): # type: (Callable[[], WSGIRequest], DjangoIntegration) -> EventProcessor def wsgi_request_event_processor(event, hint): # type: (Event, dict[str, Any]) -> Event # if the request is gone we are fine not logging the data from # it. This might happen if the processor is pushed away to # another thread. 
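# (Side note, a minimal sketch of the weak-reference behavior relied on
# here; under CPython's reference counting the second assertion holds as
# soon as the last strong reference is dropped:)
#
#     import weakref
#
#     class _Req:
#         pass
#
#     req = _Req()
#     weak_request = weakref.ref(req)
#     assert weak_request() is req
#     del req
#     assert weak_request() is None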
request = weak_request() if request is None: return event django_3 = ASGIRequest is not None if django_3 and type(request) == ASGIRequest: # We have an `asgi_request_event_processor` for this. return event with capture_internal_exceptions(): DjangoRequestExtractor(request).extract_into_event(event) if should_send_default_pii(): with capture_internal_exceptions(): _set_user_info(request, event) return event return wsgi_request_event_processor def _got_request_exception(request=None, **kwargs): # type: (WSGIRequest, **Any) -> None client = sentry_sdk.get_client() integration = client.get_integration(DjangoIntegration) if integration is None: return if request is not None and integration.transaction_style == "url": scope = sentry_sdk.get_current_scope() _attempt_resolve_again(request, scope, integration.transaction_style) event, hint = event_from_exception( sys.exc_info(), client_options=client.options, mechanism={"type": "django", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) class DjangoRequestExtractor(RequestExtractor): def __init__(self, request): # type: (Union[WSGIRequest, ASGIRequest]) -> None try: drf_request = request._sentry_drf_request_backref() if drf_request is not None: request = drf_request except AttributeError: pass self.request = request def env(self): # type: () -> Dict[str, str] return self.request.META def cookies(self): # type: () -> Dict[str, Union[str, AnnotatedValue]] privacy_cookies = [ django_settings.CSRF_COOKIE_NAME, django_settings.SESSION_COOKIE_NAME, ] clean_cookies = {} # type: Dict[str, Union[str, AnnotatedValue]] for key, val in self.request.COOKIES.items(): if key in privacy_cookies: clean_cookies[key] = SENSITIVE_DATA_SUBSTITUTE else: clean_cookies[key] = val return clean_cookies def raw_data(self): # type: () -> bytes return self.request.body def form(self): # type: () -> QueryDict return self.request.POST def files(self): # type: () -> MultiValueDict return self.request.FILES def size_of_file(self, file): # type: (Any) -> int return file.size def parsed_body(self): # type: () -> Optional[Dict[str, Any]] try: return self.request.data except AttributeError: return RequestExtractor.parsed_body(self) def _set_user_info(request, event): # type: (WSGIRequest, Event) -> None user_info = event.setdefault("user", {}) user = getattr(request, "user", None) if user is None or not is_authenticated(user): return try: user_info.setdefault("id", str(user.pk)) except Exception: pass try: user_info.setdefault("email", user.email) except Exception: pass try: user_info.setdefault("username", user.get_username()) except Exception: pass def install_sql_hook(): # type: () -> None """If installed this causes Django's queries to be captured.""" try: from django.db.backends.utils import CursorWrapper except ImportError: from django.db.backends.util import CursorWrapper try: # django 1.6 and 1.7 compatibility from django.db.backends import BaseDatabaseWrapper except ImportError: # django 1.8 or later from django.db.backends.base.base import BaseDatabaseWrapper try: real_execute = CursorWrapper.execute real_executemany = CursorWrapper.executemany real_connect = BaseDatabaseWrapper.connect except AttributeError: # This won't work on Django versions < 1.6 return @ensure_integration_enabled(DjangoIntegration, real_execute) def execute(self, sql, params=None): # type: (CursorWrapper, Any, Optional[Any]) -> Any with record_sql_queries( cursor=self.cursor, query=sql, params_list=params, paramstyle="format", executemany=False, span_origin=DjangoIntegration.origin_db, ) 
as span: _set_db_data(span, self) result = real_execute(self, sql, params) with capture_internal_exceptions(): add_query_source(span) return result @ensure_integration_enabled(DjangoIntegration, real_executemany) def executemany(self, sql, param_list): # type: (CursorWrapper, Any, List[Any]) -> Any with record_sql_queries( cursor=self.cursor, query=sql, params_list=param_list, paramstyle="format", executemany=True, span_origin=DjangoIntegration.origin_db, ) as span: _set_db_data(span, self) result = real_executemany(self, sql, param_list) with capture_internal_exceptions(): add_query_source(span) return result @ensure_integration_enabled(DjangoIntegration, real_connect) def connect(self): # type: (BaseDatabaseWrapper) -> None with capture_internal_exceptions(): sentry_sdk.add_breadcrumb(message="connect", category="query") with sentry_sdk.start_span( op=OP.DB, name="connect", origin=DjangoIntegration.origin_db, ) as span: _set_db_data(span, self) return real_connect(self) CursorWrapper.execute = execute CursorWrapper.executemany = executemany BaseDatabaseWrapper.connect = connect ignore_logger("django.db.backends") def _set_db_data(span, cursor_or_db): # type: (Span, Any) -> None db = cursor_or_db.db if hasattr(cursor_or_db, "db") else cursor_or_db vendor = db.vendor span.set_data(SPANDATA.DB_SYSTEM, vendor) # Some custom backends override `__getattr__`, making it look like `cursor_or_db` # actually has a `connection` and the `connection` has a `get_dsn_parameters` # attribute, only to throw an error once you actually want to call it. # Hence the `inspect` check whether `get_dsn_parameters` is an actual callable # function. is_psycopg2 = ( hasattr(cursor_or_db, "connection") and hasattr(cursor_or_db.connection, "get_dsn_parameters") and inspect.isroutine(cursor_or_db.connection.get_dsn_parameters) ) if is_psycopg2: connection_params = cursor_or_db.connection.get_dsn_parameters() else: try: # psycopg3, only extract needed params as get_parameters # can be slow because of the additional logic to filter out default # values connection_params = { "dbname": cursor_or_db.connection.info.dbname, "port": cursor_or_db.connection.info.port, } # PGhost returns host or base dir of UNIX socket as an absolute path # starting with /, use it only when it contains host pg_host = cursor_or_db.connection.info.host if pg_host and not pg_host.startswith("/"): connection_params["host"] = pg_host except Exception: connection_params = db.get_connection_params() db_name = connection_params.get("dbname") or connection_params.get("database") if db_name is not None: span.set_data(SPANDATA.DB_NAME, db_name) server_address = connection_params.get("host") if server_address is not None: span.set_data(SPANDATA.SERVER_ADDRESS, server_address) server_port = connection_params.get("port") if server_port is not None: span.set_data(SPANDATA.SERVER_PORT, str(server_port)) server_socket_address = connection_params.get("unix_socket") if server_socket_address is not None: span.set_data(SPANDATA.SERVER_SOCKET_ADDRESS, server_socket_address) sentry-python-2.18.0/sentry_sdk/integrations/django/asgi.py000066400000000000000000000201551471214654000241060ustar00rootroot00000000000000""" Instrumentation for Django 3.0 Since this file contains `async def` it is conditionally imported in `sentry_sdk.integrations.django` (depending on the existence of `django.core.handlers.asgi`. 
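A sketch of that conditional import, as performed by the parent package:

    try:
        from django.core.handlers.asgi import ASGIHandler
    except ImportError:
        ASGIHandler = None  # Django < 3.0: nothing to patch

    if ASGIHandler is not None:
        from sentry_sdk.integrations.django.asgi import (
            patch_django_asgi_handler_impl,
        )
        patch_django_asgi_handler_impl(ASGIHandler)
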
""" import asyncio import functools import inspect from django.core.handlers.wsgi import WSGIRequest import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from sentry_sdk.scope import should_send_default_pii from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any, Callable, Union, TypeVar from django.core.handlers.asgi import ASGIRequest from django.http.response import HttpResponse from sentry_sdk._types import Event, EventProcessor _F = TypeVar("_F", bound=Callable[..., Any]) # Python 3.12 deprecates asyncio.iscoroutinefunction() as an alias for # inspect.iscoroutinefunction(), whilst also removing the _is_coroutine marker. # The latter is replaced with the inspect.markcoroutinefunction decorator. # Until 3.12 is the minimum supported Python version, provide a shim. # This was copied from https://github.com/django/asgiref/blob/main/asgiref/sync.py if hasattr(inspect, "markcoroutinefunction"): iscoroutinefunction = inspect.iscoroutinefunction markcoroutinefunction = inspect.markcoroutinefunction else: iscoroutinefunction = asyncio.iscoroutinefunction # type: ignore[assignment] def markcoroutinefunction(func: "_F") -> "_F": func._is_coroutine = asyncio.coroutines._is_coroutine # type: ignore return func def _make_asgi_request_event_processor(request): # type: (ASGIRequest) -> EventProcessor def asgi_request_event_processor(event, hint): # type: (Event, dict[str, Any]) -> Event # if the request is gone we are fine not logging the data from # it. This might happen if the processor is pushed away to # another thread. from sentry_sdk.integrations.django import ( DjangoRequestExtractor, _set_user_info, ) if request is None: return event if type(request) == WSGIRequest: return event with capture_internal_exceptions(): DjangoRequestExtractor(request).extract_into_event(event) if should_send_default_pii(): with capture_internal_exceptions(): _set_user_info(request, event) return event return asgi_request_event_processor def patch_django_asgi_handler_impl(cls): # type: (Any) -> None from sentry_sdk.integrations.django import DjangoIntegration old_app = cls.__call__ async def sentry_patched_asgi_handler(self, scope, receive, send): # type: (Any, Any, Any, Any) -> Any integration = sentry_sdk.get_client().get_integration(DjangoIntegration) if integration is None: return await old_app(self, scope, receive, send) middleware = SentryAsgiMiddleware( old_app.__get__(self, cls), unsafe_context_data=True, span_origin=DjangoIntegration.origin, http_methods_to_capture=integration.http_methods_to_capture, )._run_asgi3 return await middleware(scope, receive, send) cls.__call__ = sentry_patched_asgi_handler modern_django_asgi_support = hasattr(cls, "create_request") if modern_django_asgi_support: old_create_request = cls.create_request @ensure_integration_enabled(DjangoIntegration, old_create_request) def sentry_patched_create_request(self, *args, **kwargs): # type: (Any, *Any, **Any) -> Any request, error_response = old_create_request(self, *args, **kwargs) scope = sentry_sdk.get_isolation_scope() scope.add_event_processor(_make_asgi_request_event_processor(request)) return request, error_response cls.create_request = sentry_patched_create_request def patch_get_response_async(cls, _before_get_response): # type: (Any, Any) -> None old_get_response_async = cls.get_response_async async def sentry_patched_get_response_async(self, request): # type: 
(Any, Any) -> Union[HttpResponse, BaseException] _before_get_response(request) return await old_get_response_async(self, request) cls.get_response_async = sentry_patched_get_response_async def patch_channels_asgi_handler_impl(cls): # type: (Any) -> None import channels # type: ignore from sentry_sdk.integrations.django import DjangoIntegration if channels.__version__ < "3.0.0": old_app = cls.__call__ async def sentry_patched_asgi_handler(self, receive, send): # type: (Any, Any, Any) -> Any integration = sentry_sdk.get_client().get_integration(DjangoIntegration) if integration is None: return await old_app(self, receive, send) middleware = SentryAsgiMiddleware( lambda _scope: old_app.__get__(self, cls), unsafe_context_data=True, span_origin=DjangoIntegration.origin, http_methods_to_capture=integration.http_methods_to_capture, ) return await middleware(self.scope)(receive, send) cls.__call__ = sentry_patched_asgi_handler else: # The ASGI handler in Channels >= 3 has the same signature as # the Django handler. patch_django_asgi_handler_impl(cls) def wrap_async_view(callback): # type: (Any) -> Any from sentry_sdk.integrations.django import DjangoIntegration @functools.wraps(callback) async def sentry_wrapped_callback(request, *args, **kwargs): # type: (Any, *Any, **Any) -> Any current_scope = sentry_sdk.get_current_scope() if current_scope.transaction is not None: current_scope.transaction.update_active_thread() sentry_scope = sentry_sdk.get_isolation_scope() if sentry_scope.profile is not None: sentry_scope.profile.update_active_thread_id() with sentry_sdk.start_span( op=OP.VIEW_RENDER, name=request.resolver_match.view_name, origin=DjangoIntegration.origin, ): return await callback(request, *args, **kwargs) return sentry_wrapped_callback def _asgi_middleware_mixin_factory(_check_middleware_span): # type: (Callable[..., Any]) -> Any """ Mixin class factory that generates a middleware mixin for handling requests in async mode. """ class SentryASGIMixin: if TYPE_CHECKING: _inner = None def __init__(self, get_response): # type: (Callable[..., Any]) -> None self.get_response = get_response self._acall_method = None self._async_check() def _async_check(self): # type: () -> None """ If get_response is a coroutine function, turns us into async mode so a thread is not consumed during a whole request. 
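The effect can be illustrated with the iscoroutinefunction /
markcoroutinefunction shim defined at the top of this module (a sketch;
in this class it is the middleware instance itself that gets marked):

    async def get_response(request): ...

    assert iscoroutinefunction(get_response)

    def plain(request): ...

    assert iscoroutinefunction(markcoroutinefunction(plain))
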
Taken from django.utils.deprecation::MiddlewareMixin._async_check """ if iscoroutinefunction(self.get_response): markcoroutinefunction(self) def async_route_check(self): # type: () -> bool """ Function that checks if we are in async mode, and if we are forwards the handling of requests to __acall__ """ return iscoroutinefunction(self.get_response) async def __acall__(self, *args, **kwargs): # type: (*Any, **Any) -> Any f = self._acall_method if f is None: if hasattr(self._inner, "__acall__"): self._acall_method = f = self._inner.__acall__ # type: ignore else: self._acall_method = f = self._inner middleware_span = _check_middleware_span(old_method=f) if middleware_span is None: return await f(*args, **kwargs) with middleware_span: return await f(*args, **kwargs) return SentryASGIMixin sentry-python-2.18.0/sentry_sdk/integrations/django/caching.py000066400000000000000000000140601471214654000245550ustar00rootroot00000000000000import functools from typing import TYPE_CHECKING from sentry_sdk.integrations.redis.utils import _get_safe_key, _key_as_string from urllib3.util import parse_url as urlparse from django import VERSION as DJANGO_VERSION from django.core.cache import CacheHandler import sentry_sdk from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, ) if TYPE_CHECKING: from typing import Any from typing import Callable from typing import Optional METHODS_TO_INSTRUMENT = [ "set", "set_many", "get", "get_many", ] def _get_span_description(method_name, args, kwargs): # type: (str, tuple[Any], dict[str, Any]) -> str return _key_as_string(_get_safe_key(method_name, args, kwargs)) def _patch_cache_method(cache, method_name, address, port): # type: (CacheHandler, str, Optional[str], Optional[int]) -> None from sentry_sdk.integrations.django import DjangoIntegration original_method = getattr(cache, method_name) @ensure_integration_enabled(DjangoIntegration, original_method) def _instrument_call( cache, method_name, original_method, args, kwargs, address, port ): # type: (CacheHandler, str, Callable[..., Any], tuple[Any, ...], dict[str, Any], Optional[str], Optional[int]) -> Any is_set_operation = method_name.startswith("set") is_get_operation = not is_set_operation op = OP.CACHE_PUT if is_set_operation else OP.CACHE_GET description = _get_span_description(method_name, args, kwargs) with sentry_sdk.start_span( op=op, name=description, origin=DjangoIntegration.origin, ) as span: value = original_method(*args, **kwargs) with capture_internal_exceptions(): if address is not None: span.set_data(SPANDATA.NETWORK_PEER_ADDRESS, address) if port is not None: span.set_data(SPANDATA.NETWORK_PEER_PORT, port) key = _get_safe_key(method_name, args, kwargs) if key is not None: span.set_data(SPANDATA.CACHE_KEY, key) item_size = None if is_get_operation: if value: item_size = len(str(value)) span.set_data(SPANDATA.CACHE_HIT, True) else: span.set_data(SPANDATA.CACHE_HIT, False) else: try: # 'set' command item_size = len(str(args[1])) except IndexError: # 'set_many' command item_size = len(str(args[0])) if item_size is not None: span.set_data(SPANDATA.CACHE_ITEM_SIZE, item_size) return value @functools.wraps(original_method) def sentry_method(*args, **kwargs): # type: (*Any, **Any) -> Any return _instrument_call( cache, method_name, original_method, args, kwargs, address, port ) setattr(cache, method_name, sentry_method) def _patch_cache(cache, address=None, port=None): # type: (CacheHandler, Optional[str], Optional[int]) -> None if not 
hasattr(cache, "_sentry_patched"): for method_name in METHODS_TO_INSTRUMENT: _patch_cache_method(cache, method_name, address, port) cache._sentry_patched = True def _get_address_port(settings): # type: (dict[str, Any]) -> tuple[Optional[str], Optional[int]] location = settings.get("LOCATION") # TODO: location can also be an array of locations # see: https://docs.djangoproject.com/en/5.0/topics/cache/#redis # GitHub issue: https://github.com/getsentry/sentry-python/issues/3062 if not isinstance(location, str): return None, None if "://" in location: parsed_url = urlparse(location) # remove the username and password from URL to not leak sensitive data. address = "{}://{}{}".format( parsed_url.scheme or "", parsed_url.hostname or "", parsed_url.path or "", ) port = parsed_url.port else: address = location port = None return address, int(port) if port is not None else None def patch_caching(): # type: () -> None from sentry_sdk.integrations.django import DjangoIntegration if not hasattr(CacheHandler, "_sentry_patched"): if DJANGO_VERSION < (3, 2): original_get_item = CacheHandler.__getitem__ @functools.wraps(original_get_item) def sentry_get_item(self, alias): # type: (CacheHandler, str) -> Any cache = original_get_item(self, alias) integration = sentry_sdk.get_client().get_integration(DjangoIntegration) if integration is not None and integration.cache_spans: from django.conf import settings address, port = _get_address_port( settings.CACHES[alias or "default"] ) _patch_cache(cache, address, port) return cache CacheHandler.__getitem__ = sentry_get_item CacheHandler._sentry_patched = True else: original_create_connection = CacheHandler.create_connection @functools.wraps(original_create_connection) def sentry_create_connection(self, alias): # type: (CacheHandler, str) -> Any cache = original_create_connection(self, alias) integration = sentry_sdk.get_client().get_integration(DjangoIntegration) if integration is not None and integration.cache_spans: address, port = _get_address_port(self.settings[alias or "default"]) _patch_cache(cache, address, port) return cache CacheHandler.create_connection = sentry_create_connection CacheHandler._sentry_patched = True sentry-python-2.18.0/sentry_sdk/integrations/django/middleware.py000066400000000000000000000135711471214654000253040ustar00rootroot00000000000000""" Create spans from Django middleware invocations """ from functools import wraps from django import VERSION as DJANGO_VERSION import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.utils import ( ContextVar, transaction_from_function, capture_internal_exceptions, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Callable from typing import Optional from typing import TypeVar from sentry_sdk.tracing import Span F = TypeVar("F", bound=Callable[..., Any]) _import_string_should_wrap_middleware = ContextVar( "import_string_should_wrap_middleware" ) DJANGO_SUPPORTS_ASYNC_MIDDLEWARE = DJANGO_VERSION >= (3, 1) if not DJANGO_SUPPORTS_ASYNC_MIDDLEWARE: _asgi_middleware_mixin_factory = lambda _: object else: from .asgi import _asgi_middleware_mixin_factory def patch_django_middlewares(): # type: () -> None from django.core.handlers import base old_import_string = base.import_string def sentry_patched_import_string(dotted_path): # type: (str) -> Any rv = old_import_string(dotted_path) if _import_string_should_wrap_middleware.get(None): rv = _wrap_middleware(rv, dotted_path) return rv base.import_string = sentry_patched_import_string old_load_middleware = 
base.BaseHandler.load_middleware def sentry_patched_load_middleware(*args, **kwargs): # type: (Any, Any) -> Any _import_string_should_wrap_middleware.set(True) try: return old_load_middleware(*args, **kwargs) finally: _import_string_should_wrap_middleware.set(False) base.BaseHandler.load_middleware = sentry_patched_load_middleware def _wrap_middleware(middleware, middleware_name): # type: (Any, str) -> Any from sentry_sdk.integrations.django import DjangoIntegration def _check_middleware_span(old_method): # type: (Callable[..., Any]) -> Optional[Span] integration = sentry_sdk.get_client().get_integration(DjangoIntegration) if integration is None or not integration.middleware_spans: return None function_name = transaction_from_function(old_method) description = middleware_name function_basename = getattr(old_method, "__name__", None) if function_basename: description = "{}.{}".format(description, function_basename) middleware_span = sentry_sdk.start_span( op=OP.MIDDLEWARE_DJANGO, name=description, origin=DjangoIntegration.origin, ) middleware_span.set_tag("django.function_name", function_name) middleware_span.set_tag("django.middleware_name", middleware_name) return middleware_span def _get_wrapped_method(old_method): # type: (F) -> F with capture_internal_exceptions(): def sentry_wrapped_method(*args, **kwargs): # type: (*Any, **Any) -> Any middleware_span = _check_middleware_span(old_method) if middleware_span is None: return old_method(*args, **kwargs) with middleware_span: return old_method(*args, **kwargs) try: # fails for __call__ of function on Python 2 (see py2.7-django-1.11) sentry_wrapped_method = wraps(old_method)(sentry_wrapped_method) # Necessary for Django 3.1 sentry_wrapped_method.__self__ = old_method.__self__ # type: ignore except Exception: pass return sentry_wrapped_method # type: ignore return old_method class SentryWrappingMiddleware( _asgi_middleware_mixin_factory(_check_middleware_span) # type: ignore ): sync_capable = getattr(middleware, "sync_capable", True) async_capable = DJANGO_SUPPORTS_ASYNC_MIDDLEWARE and getattr( middleware, "async_capable", False ) def __init__(self, get_response=None, *args, **kwargs): # type: (Optional[Callable[..., Any]], *Any, **Any) -> None if get_response: self._inner = middleware(get_response, *args, **kwargs) else: self._inner = middleware(*args, **kwargs) self.get_response = get_response self._call_method = None if self.async_capable: super().__init__(get_response) # We need correct behavior for `hasattr()`, which we can only determine # when we have an instance of the middleware we're wrapping. 
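# (Sketch of the contract relied on below: `hasattr()` returns False exactly
# when attribute lookup raises AttributeError, so `__getattr__` must
# re-raise for hook names the wrapped middleware does not define:)
#
#     class _Probe:
#         def __getattr__(self, name):
#             if name != "process_request":
#                 raise AttributeError(name)
#             return lambda request: None
#
#     assert hasattr(_Probe(), "process_request")
#     assert not hasattr(_Probe(), "process_exception")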
def __getattr__(self, method_name): # type: (str) -> Any if method_name not in ( "process_request", "process_view", "process_template_response", "process_response", "process_exception", ): raise AttributeError() old_method = getattr(self._inner, method_name) rv = _get_wrapped_method(old_method) self.__dict__[method_name] = rv return rv def __call__(self, *args, **kwargs): # type: (*Any, **Any) -> Any if hasattr(self, "async_route_check") and self.async_route_check(): return self.__acall__(*args, **kwargs) f = self._call_method if f is None: self._call_method = f = self._inner.__call__ middleware_span = _check_middleware_span(old_method=f) if middleware_span is None: return f(*args, **kwargs) with middleware_span: return f(*args, **kwargs) for attr in ( "__name__", "__module__", "__qualname__", ): if hasattr(middleware, attr): setattr(SentryWrappingMiddleware, attr, getattr(middleware, attr)) return SentryWrappingMiddleware sentry-python-2.18.0/sentry_sdk/integrations/django/signals_handlers.py000066400000000000000000000060321471214654000265010ustar00rootroot00000000000000from functools import wraps from django.dispatch import Signal import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.integrations.django import DJANGO_VERSION from typing import TYPE_CHECKING if TYPE_CHECKING: from collections.abc import Callable from typing import Any, Union def _get_receiver_name(receiver): # type: (Callable[..., Any]) -> str name = "" if hasattr(receiver, "__qualname__"): name = receiver.__qualname__ elif hasattr(receiver, "__name__"): # Python 2.7 has no __qualname__ name = receiver.__name__ elif hasattr( receiver, "func" ): # certain functions (like partials) don't have a name if hasattr(receiver, "func") and hasattr(receiver.func, "__name__"): name = "partial()" if ( name == "" ): # In case nothing was found, return the string representation (this is the slowest case) return str(receiver) if hasattr(receiver, "__module__"): # prepend with module, if there is one name = receiver.__module__ + "." + name return name def patch_signals(): # type: () -> None """ Patch django signal receivers to create a span. This only wraps sync receivers. Django>=5.0 introduced async receivers, but since we don't create transactions for ASGI Django, we don't wrap them. 
""" from sentry_sdk.integrations.django import DjangoIntegration old_live_receivers = Signal._live_receivers def _sentry_live_receivers(self, sender): # type: (Signal, Any) -> Union[tuple[list[Callable[..., Any]], list[Callable[..., Any]]], list[Callable[..., Any]]] if DJANGO_VERSION >= (5, 0): sync_receivers, async_receivers = old_live_receivers(self, sender) else: sync_receivers = old_live_receivers(self, sender) async_receivers = [] def sentry_sync_receiver_wrapper(receiver): # type: (Callable[..., Any]) -> Callable[..., Any] @wraps(receiver) def wrapper(*args, **kwargs): # type: (Any, Any) -> Any signal_name = _get_receiver_name(receiver) with sentry_sdk.start_span( op=OP.EVENT_DJANGO, name=signal_name, origin=DjangoIntegration.origin, ) as span: span.set_data("signal", signal_name) return receiver(*args, **kwargs) return wrapper integration = sentry_sdk.get_client().get_integration(DjangoIntegration) if ( integration and integration.signals_spans and self not in integration.signals_denylist ): for idx, receiver in enumerate(sync_receivers): sync_receivers[idx] = sentry_sync_receiver_wrapper(receiver) if DJANGO_VERSION >= (5, 0): return sync_receivers, async_receivers else: return sync_receivers Signal._live_receivers = _sentry_live_receivers sentry-python-2.18.0/sentry_sdk/integrations/django/templates.py000066400000000000000000000131351471214654000251610ustar00rootroot00000000000000import functools from django.template import TemplateSyntaxError from django.utils.safestring import mark_safe from django import VERSION as DJANGO_VERSION import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.utils import ensure_integration_enabled from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Dict from typing import Optional from typing import Iterator from typing import Tuple try: # support Django 1.9 from django.template.base import Origin except ImportError: # backward compatibility from django.template.loader import LoaderOrigin as Origin def get_template_frame_from_exception(exc_value): # type: (Optional[BaseException]) -> Optional[Dict[str, Any]] # As of Django 1.9 or so the new template debug thing showed up. 
    if hasattr(exc_value, "template_debug"):
        return _get_template_frame_from_debug(exc_value.template_debug)  # type: ignore

    # As of r16833 (Django) all exceptions may contain a
    # ``django_template_source`` attribute (rather than the legacy
    # ``TemplateSyntaxError.source`` check)
    if hasattr(exc_value, "django_template_source"):
        return _get_template_frame_from_source(
            exc_value.django_template_source  # type: ignore
        )

    if isinstance(exc_value, TemplateSyntaxError) and hasattr(exc_value, "source"):
        source = exc_value.source
        if isinstance(source, (tuple, list)) and isinstance(source[0], Origin):
            return _get_template_frame_from_source(source)  # type: ignore

    return None


def _get_template_name_description(template_name):
    # type: (str) -> str
    if isinstance(template_name, (list, tuple)):
        if template_name:
            return "[{}, ...]".format(template_name[0])
    else:
        return template_name


def patch_templates():
    # type: () -> None
    from django.template.response import SimpleTemplateResponse
    from sentry_sdk.integrations.django import DjangoIntegration

    real_rendered_content = SimpleTemplateResponse.rendered_content

    @property  # type: ignore
    @ensure_integration_enabled(DjangoIntegration, real_rendered_content.fget)
    def rendered_content(self):
        # type: (SimpleTemplateResponse) -> str
        with sentry_sdk.start_span(
            op=OP.TEMPLATE_RENDER,
            name=_get_template_name_description(self.template_name),
            origin=DjangoIntegration.origin,
        ) as span:
            span.set_data("context", self.context_data)
            return real_rendered_content.fget(self)

    SimpleTemplateResponse.rendered_content = rendered_content

    if DJANGO_VERSION < (1, 7):
        return

    import django.shortcuts

    real_render = django.shortcuts.render

    @functools.wraps(real_render)
    @ensure_integration_enabled(DjangoIntegration, real_render)
    def render(request, template_name, context=None, *args, **kwargs):
        # type: (django.http.HttpRequest, str, Optional[Dict[str, Any]], *Any, **Any) -> django.http.HttpResponse

        # Inject trace meta tags into template context
        context = context or {}
        if "sentry_trace_meta" not in context:
            context["sentry_trace_meta"] = mark_safe(
                sentry_sdk.get_current_scope().trace_propagation_meta()
            )

        with sentry_sdk.start_span(
            op=OP.TEMPLATE_RENDER,
            name=_get_template_name_description(template_name),
            origin=DjangoIntegration.origin,
        ) as span:
            span.set_data("context", context)
            return real_render(request, template_name, context, *args, **kwargs)

    django.shortcuts.render = render


def _get_template_frame_from_debug(debug):
    # type: (Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]
    if debug is None:
        return None

    lineno = debug["line"]
    filename = debug["name"]
    if filename is None:
        filename = "<template>"

    pre_context = []
    post_context = []
    context_line = None

    for i, line in debug["source_lines"]:
        if i < lineno:
            pre_context.append(line)
        elif i > lineno:
            post_context.append(line)
        else:
            context_line = line

    return {
        "filename": filename,
        "lineno": lineno,
        "pre_context": pre_context[-5:],
        "post_context": post_context[:5],
        "context_line": context_line,
        "in_app": True,
    }


def _linebreak_iter(template_source):
    # type: (str) -> Iterator[int]
    yield 0
    p = template_source.find("\n")

    while p >= 0:
        yield p + 1
        p = template_source.find("\n", p + 1)


def _get_template_frame_from_source(source):
    # type: (Tuple[Origin, Tuple[int, int]]) -> Optional[Dict[str, Any]]
    if not source:
        return None

    origin, (start, end) = source
    filename = getattr(origin, "loadname", None)
    if filename is None:
        filename = "<template>"

    template_source = origin.reload()
    lineno = None
    upto = 0
    pre_context = []
    post_context = []
    context_line = None

    for num, next in enumerate(_linebreak_iter(template_source)):
        line = template_source[upto:next]
        if start >= upto and end <= next:
            lineno = num
            context_line = line
        elif lineno is None:
            pre_context.append(line)
        else:
            post_context.append(line)
        upto = next

    if context_line is None or lineno is None:
        return None

    return {
        "filename": filename,
        "lineno": lineno,
        "pre_context": pre_context[-5:],
        "post_context": post_context[:5],
        "context_line": context_line,
    }
sentry-python-2.18.0/sentry_sdk/integrations/django/transactions.py000066400000000000000000000115271471214654000256740ustar00rootroot00000000000000"""
Copied from raven-python.

Despite being called "legacy" in some places this resolver is very much still
in use.
"""

import re

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from django.urls.resolvers import URLResolver
    from typing import Dict
    from typing import List
    from typing import Optional
    from django.urls.resolvers import URLPattern
    from typing import Tuple
    from typing import Union
    from re import Pattern

from django import VERSION as DJANGO_VERSION

if DJANGO_VERSION >= (2, 0):
    from django.urls.resolvers import RoutePattern
else:
    RoutePattern = None

try:
    from django.urls import get_resolver
except ImportError:
    from django.core.urlresolvers import get_resolver


def get_regex(resolver_or_pattern):
    # type: (Union[URLPattern, URLResolver]) -> Pattern[str]
    """Utility method for django's deprecated resolver.regex"""
    try:
        regex = resolver_or_pattern.regex
    except AttributeError:
        regex = resolver_or_pattern.pattern.regex
    return regex


class RavenResolver:
    _new_style_group_matcher = re.compile(
        r"<(?:([^>:]+):)?([^>]+)>"
    )  # https://github.com/django/django/blob/21382e2743d06efbf5623e7c9b6dccf2a325669b/django/urls/resolvers.py#L245-L247
    _optional_group_matcher = re.compile(r"\(\?\:([^\)]+)\)")
    _named_group_matcher = re.compile(r"\(\?P<(\w+)>[^\)]+\)+")
    _non_named_group_matcher = re.compile(r"\([^\)]+\)")
    # [foo|bar|baz]
    _either_option_matcher = re.compile(r"\[([^\]]+)\|([^\]]+)\]")
    _camel_re = re.compile(r"([A-Z]+)([a-z])")

    _cache = {}  # type: Dict[URLPattern, str]

    def _simplify(self, pattern):
        # type: (Union[URLPattern, URLResolver]) -> str
        r"""
        Clean up urlpattern regexes into something readable by humans:

        From:
        > "^(?P<sport_slug>\w+)/athletes/(?P<athlete_slug>\w+)/$"

        To:
        > "{sport_slug}/athletes/{athlete_slug}/"
        """
        # "new-style" path patterns can be parsed directly without turning them
        # into regexes first
        if (
            RoutePattern is not None
            and hasattr(pattern, "pattern")
            and isinstance(pattern.pattern, RoutePattern)
        ):
            return self._new_style_group_matcher.sub(
                lambda m: "{%s}" % m.group(2), str(pattern.pattern._route)
            )

        result = get_regex(pattern).pattern

        # remove optional params
        # TODO(dcramer): it'd be nice to change these into [%s] but it currently
        # conflicts with the other rules because we're doing regexp matches
        # rather than parsing tokens
        result = self._optional_group_matcher.sub(lambda m: "%s" % m.group(1), result)

        # handle named groups first
        result = self._named_group_matcher.sub(lambda m: "{%s}" % m.group(1), result)

        # handle non-named groups
        result = self._non_named_group_matcher.sub("{var}", result)

        # handle either-option lists like [foo|bar]
        result = self._either_option_matcher.sub(lambda m: m.group(1), result)

        # clean up any outstanding regex-y characters.
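        # Worked example (hypothetical pattern, for illustration only): by this
        # point "^articles/(?P<year>[0-9]{4})/(?:summary/)?$" has been reduced
        # to "^articles/{year}/summary/?$"; the chain below strips the leftover
        # anchors and metacharacters, yielding "articles/{year}/summary/".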
result = ( result.replace("^", "") .replace("$", "") .replace("?", "") .replace("\\A", "") .replace("\\Z", "") .replace("//", "/") .replace("\\", "") ) return result def _resolve(self, resolver, path, parents=None): # type: (URLResolver, str, Optional[List[URLResolver]]) -> Optional[str] match = get_regex(resolver).search(path) # Django < 2.0 if not match: return None if parents is None: parents = [resolver] elif resolver not in parents: parents = parents + [resolver] new_path = path[match.end() :] for pattern in resolver.url_patterns: # this is an include() if not pattern.callback: match_ = self._resolve(pattern, new_path, parents) if match_: return match_ continue elif not get_regex(pattern).search(new_path): continue try: return self._cache[pattern] except KeyError: pass prefix = "".join(self._simplify(p) for p in parents) result = prefix + self._simplify(pattern) if not result.startswith("/"): result = "/" + result self._cache[pattern] = result return result return None def resolve( self, path, # type: str urlconf=None, # type: Union[None, Tuple[URLPattern, URLPattern, URLResolver], Tuple[URLPattern]] ): # type: (...) -> Optional[str] resolver = get_resolver(urlconf) match = self._resolve(resolver, path) return match LEGACY_RESOLVER = RavenResolver() sentry-python-2.18.0/sentry_sdk/integrations/django/views.py000066400000000000000000000061011471214654000243130ustar00rootroot00000000000000import functools import sentry_sdk from sentry_sdk.consts import OP from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any try: from asyncio import iscoroutinefunction except ImportError: iscoroutinefunction = None # type: ignore try: from sentry_sdk.integrations.django.asgi import wrap_async_view except (ImportError, SyntaxError): wrap_async_view = None # type: ignore def patch_views(): # type: () -> None from django.core.handlers.base import BaseHandler from django.template.response import SimpleTemplateResponse from sentry_sdk.integrations.django import DjangoIntegration old_make_view_atomic = BaseHandler.make_view_atomic old_render = SimpleTemplateResponse.render def sentry_patched_render(self): # type: (SimpleTemplateResponse) -> Any with sentry_sdk.start_span( op=OP.VIEW_RESPONSE_RENDER, name="serialize response", origin=DjangoIntegration.origin, ): return old_render(self) @functools.wraps(old_make_view_atomic) def sentry_patched_make_view_atomic(self, *args, **kwargs): # type: (Any, *Any, **Any) -> Any callback = old_make_view_atomic(self, *args, **kwargs) # XXX: The wrapper function is created for every request. Find more # efficient way to wrap views (or build a cache?) 
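        # Roughly: BaseHandler calls make_view_atomic() with the resolved view
        # for every request and then invokes whatever it returns, so wrapping
        # the return value here effectively wraps the view itself.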
integration = sentry_sdk.get_client().get_integration(DjangoIntegration) if integration is not None and integration.middleware_spans: is_async_view = ( iscoroutinefunction is not None and wrap_async_view is not None and iscoroutinefunction(callback) ) if is_async_view: sentry_wrapped_callback = wrap_async_view(callback) else: sentry_wrapped_callback = _wrap_sync_view(callback) else: sentry_wrapped_callback = callback return sentry_wrapped_callback SimpleTemplateResponse.render = sentry_patched_render BaseHandler.make_view_atomic = sentry_patched_make_view_atomic def _wrap_sync_view(callback): # type: (Any) -> Any from sentry_sdk.integrations.django import DjangoIntegration @functools.wraps(callback) def sentry_wrapped_callback(request, *args, **kwargs): # type: (Any, *Any, **Any) -> Any current_scope = sentry_sdk.get_current_scope() if current_scope.transaction is not None: current_scope.transaction.update_active_thread() sentry_scope = sentry_sdk.get_isolation_scope() # set the active thread id to the handler thread for sync views # this isn't necessary for async views since that runs on main if sentry_scope.profile is not None: sentry_scope.profile.update_active_thread_id() with sentry_sdk.start_span( op=OP.VIEW_RENDER, name=request.resolver_match.view_name, origin=DjangoIntegration.origin, ): return callback(request, *args, **kwargs) return sentry_wrapped_callback sentry-python-2.18.0/sentry_sdk/integrations/dramatiq.py000066400000000000000000000127071471214654000235270ustar00rootroot00000000000000import json import sentry_sdk from sentry_sdk.integrations import Integration from sentry_sdk.integrations._wsgi_common import request_body_within_bounds from sentry_sdk.utils import ( AnnotatedValue, capture_internal_exceptions, event_from_exception, ) from dramatiq.broker import Broker # type: ignore from dramatiq.message import Message # type: ignore from dramatiq.middleware import Middleware, default_middleware # type: ignore from dramatiq.errors import Retry # type: ignore from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any, Callable, Dict, Optional, Union from sentry_sdk._types import Event, Hint class DramatiqIntegration(Integration): """ Dramatiq integration for Sentry Please make sure that you call `sentry_sdk.init` *before* initializing your broker, as it monkey patches `Broker.__init__`. This integration was originally developed and maintained by https://github.com/jacobsvante and later donated to the Sentry project. """ identifier = "dramatiq" @staticmethod def setup_once(): # type: () -> None _patch_dramatiq_broker() def _patch_dramatiq_broker(): # type: () -> None original_broker__init__ = Broker.__init__ def sentry_patched_broker__init__(self, *args, **kw): # type: (Broker, *Any, **Any) -> None integration = sentry_sdk.get_client().get_integration(DramatiqIntegration) try: middleware = kw.pop("middleware") except KeyError: # Unfortunately Broker and StubBroker allows middleware to be # passed in as positional arguments, whilst RabbitmqBroker and # RedisBroker does not. 
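            # Illustration (hypothetical broker setups, not SDK API):
            #   StubBroker([SomeMiddleware()])                 # positional
            #   RabbitmqBroker(url="...", middleware=[...])    # keyword only
            # so a single positional argument is treated as the middleware
            # list here.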
if len(args) == 1: middleware = args[0] args = [] # type: ignore else: middleware = None if middleware is None: middleware = list(m() for m in default_middleware) else: middleware = list(middleware) if integration is not None: middleware = [m for m in middleware if not isinstance(m, SentryMiddleware)] middleware.insert(0, SentryMiddleware()) kw["middleware"] = middleware original_broker__init__(self, *args, **kw) Broker.__init__ = sentry_patched_broker__init__ class SentryMiddleware(Middleware): # type: ignore[misc] """ A Dramatiq middleware that automatically captures and sends exceptions to Sentry. This is automatically added to every instantiated broker via the DramatiqIntegration. """ def before_process_message(self, broker, message): # type: (Broker, Message) -> None integration = sentry_sdk.get_client().get_integration(DramatiqIntegration) if integration is None: return message._scope_manager = sentry_sdk.new_scope() message._scope_manager.__enter__() scope = sentry_sdk.get_current_scope() scope.transaction = message.actor_name scope.set_extra("dramatiq_message_id", message.message_id) scope.add_event_processor(_make_message_event_processor(message, integration)) def after_process_message(self, broker, message, *, result=None, exception=None): # type: (Broker, Message, Any, Optional[Any], Optional[Exception]) -> None integration = sentry_sdk.get_client().get_integration(DramatiqIntegration) if integration is None: return actor = broker.get_actor(message.actor_name) throws = message.options.get("throws") or actor.options.get("throws") try: if ( exception is not None and not (throws and isinstance(exception, throws)) and not isinstance(exception, Retry) ): event, hint = event_from_exception( exception, client_options=sentry_sdk.get_client().options, mechanism={ "type": DramatiqIntegration.identifier, "handled": False, }, ) sentry_sdk.capture_event(event, hint=hint) finally: message._scope_manager.__exit__(None, None, None) def _make_message_event_processor(message, integration): # type: (Message, DramatiqIntegration) -> Callable[[Event, Hint], Optional[Event]] def inner(event, hint): # type: (Event, Hint) -> Optional[Event] with capture_internal_exceptions(): DramatiqMessageExtractor(message).extract_into_event(event) return event return inner class DramatiqMessageExtractor: def __init__(self, message): # type: (Message) -> None self.message_data = dict(message.asdict()) def content_length(self): # type: () -> int return len(json.dumps(self.message_data)) def extract_into_event(self, event): # type: (Event) -> None client = sentry_sdk.get_client() if not client.is_active(): return contexts = event.setdefault("contexts", {}) request_info = contexts.setdefault("dramatiq", {}) request_info["type"] = "dramatiq" data = None # type: Optional[Union[AnnotatedValue, Dict[str, Any]]] if not request_body_within_bounds(client, self.content_length()): data = AnnotatedValue.removed_because_over_size_limit() else: data = self.message_data request_info["data"] = data sentry-python-2.18.0/sentry_sdk/integrations/excepthook.py000066400000000000000000000045501471214654000240730ustar00rootroot00000000000000import sys import sentry_sdk from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, ) from sentry_sdk.integrations import Integration from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Callable from typing import Any from typing import Type from typing import Optional from types import TracebackType Excepthook = Callable[ [Type[BaseException], BaseException, 
Optional[TracebackType]], Any, ] class ExcepthookIntegration(Integration): identifier = "excepthook" always_run = False def __init__(self, always_run=False): # type: (bool) -> None if not isinstance(always_run, bool): raise ValueError( "Invalid value for always_run: %s (must be type boolean)" % (always_run,) ) self.always_run = always_run @staticmethod def setup_once(): # type: () -> None sys.excepthook = _make_excepthook(sys.excepthook) def _make_excepthook(old_excepthook): # type: (Excepthook) -> Excepthook def sentry_sdk_excepthook(type_, value, traceback): # type: (Type[BaseException], BaseException, Optional[TracebackType]) -> None integration = sentry_sdk.get_client().get_integration(ExcepthookIntegration) # Note: If we replace this with ensure_integration_enabled then # we break the exceptiongroup backport; # See: https://github.com/getsentry/sentry-python/issues/3097 if integration is None: return old_excepthook(type_, value, traceback) if _should_send(integration.always_run): with capture_internal_exceptions(): event, hint = event_from_exception( (type_, value, traceback), client_options=sentry_sdk.get_client().options, mechanism={"type": "excepthook", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) return old_excepthook(type_, value, traceback) return sentry_sdk_excepthook def _should_send(always_run=False): # type: (bool) -> bool if always_run: return True if hasattr(sys, "ps1"): # Disable the excepthook for interactive Python shells, otherwise # every typo gets sent to Sentry. return False return True sentry-python-2.18.0/sentry_sdk/integrations/executing.py000066400000000000000000000037121471214654000237140ustar00rootroot00000000000000import sentry_sdk from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.scope import add_global_event_processor from sentry_sdk.utils import walk_exception_chain, iter_stacks from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Optional from sentry_sdk._types import Event, Hint try: import executing except ImportError: raise DidNotEnable("executing is not installed") class ExecutingIntegration(Integration): identifier = "executing" @staticmethod def setup_once(): # type: () -> None @add_global_event_processor def add_executing_info(event, hint): # type: (Event, Optional[Hint]) -> Optional[Event] if sentry_sdk.get_client().get_integration(ExecutingIntegration) is None: return event if hint is None: return event exc_info = hint.get("exc_info", None) if exc_info is None: return event exception = event.get("exception", None) if exception is None: return event values = exception.get("values", None) if values is None: return event for exception, (_exc_type, _exc_value, exc_tb) in zip( reversed(values), walk_exception_chain(exc_info) ): sentry_frames = [ frame for frame in exception.get("stacktrace", {}).get("frames", []) if frame.get("function") ] tbs = list(iter_stacks(exc_tb)) if len(sentry_frames) != len(tbs): continue for sentry_frame, tb in zip(sentry_frames, tbs): frame = tb.tb_frame source = executing.Source.for_frame(frame) sentry_frame["function"] = source.code_qualname(frame.f_code) return event sentry-python-2.18.0/sentry_sdk/integrations/falcon.py000066400000000000000000000222641471214654000231660ustar00rootroot00000000000000import sentry_sdk from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations._wsgi_common import RequestExtractor from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware from sentry_sdk.tracing import SOURCE_FOR_STYLE from 
sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, event_from_exception, parse_version, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Dict from typing import Optional from sentry_sdk._types import Event, EventProcessor # In Falcon 3.0 `falcon.api_helpers` is renamed to `falcon.app_helpers` # and `falcon.API` to `falcon.App` try: import falcon # type: ignore from falcon import __version__ as FALCON_VERSION except ImportError: raise DidNotEnable("Falcon not installed") try: import falcon.app_helpers # type: ignore falcon_helpers = falcon.app_helpers falcon_app_class = falcon.App FALCON3 = True except ImportError: import falcon.api_helpers # type: ignore falcon_helpers = falcon.api_helpers falcon_app_class = falcon.API FALCON3 = False class FalconRequestExtractor(RequestExtractor): def env(self): # type: () -> Dict[str, Any] return self.request.env def cookies(self): # type: () -> Dict[str, Any] return self.request.cookies def form(self): # type: () -> None return None # No such concept in Falcon def files(self): # type: () -> None return None # No such concept in Falcon def raw_data(self): # type: () -> Optional[str] # As request data can only be read once we won't make this available # to Sentry. Just send back a dummy string in case there was a # content length. # TODO(jmagnusson): Figure out if there's a way to support this content_length = self.content_length() if content_length > 0: return "[REQUEST_CONTAINING_RAW_DATA]" else: return None if FALCON3: def json(self): # type: () -> Optional[Dict[str, Any]] try: return self.request.media except falcon.errors.HTTPBadRequest: return None else: def json(self): # type: () -> Optional[Dict[str, Any]] try: return self.request.media except falcon.errors.HTTPBadRequest: # NOTE(jmagnusson): We return `falcon.Request._media` here because # falcon 1.4 doesn't do proper type checking in # `falcon.Request.media`. This has been fixed in 2.0. 
# Relevant code: https://github.com/falconry/falcon/blob/1.4.1/falcon/request.py#L953 return self.request._media class SentryFalconMiddleware: """Captures exceptions in Falcon requests and send to Sentry""" def process_request(self, req, resp, *args, **kwargs): # type: (Any, Any, *Any, **Any) -> None integration = sentry_sdk.get_client().get_integration(FalconIntegration) if integration is None: return scope = sentry_sdk.get_isolation_scope() scope._name = "falcon" scope.add_event_processor(_make_request_event_processor(req, integration)) TRANSACTION_STYLE_VALUES = ("uri_template", "path") class FalconIntegration(Integration): identifier = "falcon" origin = f"auto.http.{identifier}" transaction_style = "" def __init__(self, transaction_style="uri_template"): # type: (str) -> None if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" % (transaction_style, TRANSACTION_STYLE_VALUES) ) self.transaction_style = transaction_style @staticmethod def setup_once(): # type: () -> None version = parse_version(FALCON_VERSION) if version is None: raise DidNotEnable("Unparsable Falcon version: {}".format(FALCON_VERSION)) if version < (1, 4): raise DidNotEnable("Falcon 1.4 or newer required.") _patch_wsgi_app() _patch_handle_exception() _patch_prepare_middleware() def _patch_wsgi_app(): # type: () -> None original_wsgi_app = falcon_app_class.__call__ def sentry_patched_wsgi_app(self, env, start_response): # type: (falcon.API, Any, Any) -> Any integration = sentry_sdk.get_client().get_integration(FalconIntegration) if integration is None: return original_wsgi_app(self, env, start_response) sentry_wrapped = SentryWsgiMiddleware( lambda envi, start_resp: original_wsgi_app(self, envi, start_resp), span_origin=FalconIntegration.origin, ) return sentry_wrapped(env, start_response) falcon_app_class.__call__ = sentry_patched_wsgi_app def _patch_handle_exception(): # type: () -> None original_handle_exception = falcon_app_class._handle_exception @ensure_integration_enabled(FalconIntegration, original_handle_exception) def sentry_patched_handle_exception(self, *args): # type: (falcon.API, *Any) -> Any # NOTE(jmagnusson): falcon 2.0 changed falcon.API._handle_exception # method signature from `(ex, req, resp, params)` to # `(req, resp, ex, params)` ex = response = None with capture_internal_exceptions(): ex = next(argument for argument in args if isinstance(argument, Exception)) response = next( argument for argument in args if isinstance(argument, falcon.Response) ) was_handled = original_handle_exception(self, *args) if ex is None or response is None: # Both ex and response should have a non-None value at this point; otherwise, # there is an error with the SDK that will have been captured in the # capture_internal_exceptions block above. 
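            # Without both values we bail out and report nothing rather than
            # risk raising inside Falcon's own error-handling path.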
return was_handled if _exception_leads_to_http_5xx(ex, response): event, hint = event_from_exception( ex, client_options=sentry_sdk.get_client().options, mechanism={"type": "falcon", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) return was_handled falcon_app_class._handle_exception = sentry_patched_handle_exception def _patch_prepare_middleware(): # type: () -> None original_prepare_middleware = falcon_helpers.prepare_middleware def sentry_patched_prepare_middleware( middleware=None, independent_middleware=False, asgi=False ): # type: (Any, Any, bool) -> Any if asgi: # We don't support ASGI Falcon apps, so we don't patch anything here return original_prepare_middleware(middleware, independent_middleware, asgi) integration = sentry_sdk.get_client().get_integration(FalconIntegration) if integration is not None: middleware = [SentryFalconMiddleware()] + (middleware or []) # We intentionally omit the asgi argument here, since the default is False anyways, # and this way, we remain backwards-compatible with pre-3.0.0 Falcon versions. return original_prepare_middleware(middleware, independent_middleware) falcon_helpers.prepare_middleware = sentry_patched_prepare_middleware def _exception_leads_to_http_5xx(ex, response): # type: (Exception, falcon.Response) -> bool is_server_error = isinstance(ex, falcon.HTTPError) and (ex.status or "").startswith( "5" ) is_unhandled_error = not isinstance( ex, (falcon.HTTPError, falcon.http_status.HTTPStatus) ) # We only check the HTTP status on Falcon 3 because in Falcon 2, the status on the response # at the stage where we capture it is listed as 200, even though we would expect to see a 500 # status. Since at the time of this change, Falcon 2 is ca. 4 years old, we have decided to # only perform this check on Falcon 3+, despite the risk that some handled errors might be # reported to Sentry as unhandled on Falcon 2. 
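    # In short: report when the exception is an HTTPError with a 5xx status or
    # any non-Falcon exception; on Falcon 3+, additionally require that the
    # response itself carries a 5xx status.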
return (is_server_error or is_unhandled_error) and ( not FALCON3 or _has_http_5xx_status(response) ) def _has_http_5xx_status(response): # type: (falcon.Response) -> bool return response.status.startswith("5") def _set_transaction_name_and_source(event, transaction_style, request): # type: (Event, str, falcon.Request) -> None name_for_style = { "uri_template": request.uri_template, "path": request.path, } event["transaction"] = name_for_style[transaction_style] event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]} def _make_request_event_processor(req, integration): # type: (falcon.Request, FalconIntegration) -> EventProcessor def event_processor(event, hint): # type: (Event, dict[str, Any]) -> Event _set_transaction_name_and_source(event, integration.transaction_style, req) with capture_internal_exceptions(): FalconRequestExtractor(req).extract_into_event(event) return event return event_processor sentry-python-2.18.0/sentry_sdk/integrations/fastapi.py000066400000000000000000000111661471214654000233520ustar00rootroot00000000000000import asyncio from copy import deepcopy from functools import wraps import sentry_sdk from sentry_sdk.integrations import DidNotEnable from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE from sentry_sdk.utils import ( transaction_from_function, logger, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any, Callable, Dict from sentry_sdk._types import Event try: from sentry_sdk.integrations.starlette import ( StarletteIntegration, StarletteRequestExtractor, ) except DidNotEnable: raise DidNotEnable("Starlette is not installed") try: import fastapi # type: ignore except ImportError: raise DidNotEnable("FastAPI is not installed") _DEFAULT_TRANSACTION_NAME = "generic FastAPI request" class FastApiIntegration(StarletteIntegration): identifier = "fastapi" @staticmethod def setup_once(): # type: () -> None patch_get_request_handler() def _set_transaction_name_and_source(scope, transaction_style, request): # type: (sentry_sdk.Scope, str, Any) -> None name = "" if transaction_style == "endpoint": endpoint = request.scope.get("endpoint") if endpoint: name = transaction_from_function(endpoint) or "" elif transaction_style == "url": route = request.scope.get("route") if route: path = getattr(route, "path", None) if path is not None: name = path if not name: name = _DEFAULT_TRANSACTION_NAME source = TRANSACTION_SOURCE_ROUTE else: source = SOURCE_FOR_STYLE[transaction_style] scope.set_transaction_name(name, source=source) logger.debug( "[FastAPI] Set transaction name and source on scope: %s / %s", name, source ) def patch_get_request_handler(): # type: () -> None old_get_request_handler = fastapi.routing.get_request_handler def _sentry_get_request_handler(*args, **kwargs): # type: (*Any, **Any) -> Any dependant = kwargs.get("dependant") if ( dependant and dependant.call is not None and not asyncio.iscoroutinefunction(dependant.call) ): old_call = dependant.call @wraps(old_call) def _sentry_call(*args, **kwargs): # type: (*Any, **Any) -> Any current_scope = sentry_sdk.get_current_scope() if current_scope.transaction is not None: current_scope.transaction.update_active_thread() sentry_scope = sentry_sdk.get_isolation_scope() if sentry_scope.profile is not None: sentry_scope.profile.update_active_thread_id() return old_call(*args, **kwargs) dependant.call = _sentry_call old_app = old_get_request_handler(*args, **kwargs) async def _sentry_app(*args, **kwargs): # 
type: (*Any, **Any) -> Any integration = sentry_sdk.get_client().get_integration(FastApiIntegration) if integration is None: return await old_app(*args, **kwargs) request = args[0] _set_transaction_name_and_source( sentry_sdk.get_current_scope(), integration.transaction_style, request ) sentry_scope = sentry_sdk.get_isolation_scope() extractor = StarletteRequestExtractor(request) info = await extractor.extract_request_info() def _make_request_event_processor(req, integration): # type: (Any, Any) -> Callable[[Event, Dict[str, Any]], Event] def event_processor(event, hint): # type: (Event, Dict[str, Any]) -> Event # Extract information from request request_info = event.get("request", {}) if info: if "cookies" in info and should_send_default_pii(): request_info["cookies"] = info["cookies"] if "data" in info: request_info["data"] = info["data"] event["request"] = deepcopy(request_info) return event return event_processor sentry_scope._name = FastApiIntegration.identifier sentry_scope.add_event_processor( _make_request_event_processor(request, integration) ) return await old_app(*args, **kwargs) return _sentry_app fastapi.routing.get_request_handler = _sentry_get_request_handler sentry-python-2.18.0/sentry_sdk/integrations/flask.py000066400000000000000000000203121471214654000230140ustar00rootroot00000000000000import sentry_sdk from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations._wsgi_common import ( DEFAULT_HTTP_METHODS_TO_CAPTURE, RequestExtractor, ) from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, event_from_exception, package_version, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any, Callable, Dict, Union from sentry_sdk._types import Event, EventProcessor from sentry_sdk.integrations.wsgi import _ScopedResponse from werkzeug.datastructures import FileStorage, ImmutableMultiDict try: import flask_login # type: ignore except ImportError: flask_login = None try: from flask import Flask, Request # type: ignore from flask import request as flask_request from flask.signals import ( before_render_template, got_request_exception, request_started, ) from markupsafe import Markup except ImportError: raise DidNotEnable("Flask is not installed") try: import blinker # noqa except ImportError: raise DidNotEnable("blinker is not installed") TRANSACTION_STYLE_VALUES = ("endpoint", "url") class FlaskIntegration(Integration): identifier = "flask" origin = f"auto.http.{identifier}" transaction_style = "" def __init__( self, transaction_style="endpoint", # type: str http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE, # type: tuple[str, ...] ): # type: (...) 
-> None if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" % (transaction_style, TRANSACTION_STYLE_VALUES) ) self.transaction_style = transaction_style self.http_methods_to_capture = tuple(map(str.upper, http_methods_to_capture)) @staticmethod def setup_once(): # type: () -> None version = package_version("flask") if version is None: raise DidNotEnable("Unparsable Flask version.") if version < (0, 10): raise DidNotEnable("Flask 0.10 or newer is required.") before_render_template.connect(_add_sentry_trace) request_started.connect(_request_started) got_request_exception.connect(_capture_exception) old_app = Flask.__call__ def sentry_patched_wsgi_app(self, environ, start_response): # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse if sentry_sdk.get_client().get_integration(FlaskIntegration) is None: return old_app(self, environ, start_response) integration = sentry_sdk.get_client().get_integration(FlaskIntegration) middleware = SentryWsgiMiddleware( lambda *a, **kw: old_app(self, *a, **kw), span_origin=FlaskIntegration.origin, http_methods_to_capture=( integration.http_methods_to_capture if integration else DEFAULT_HTTP_METHODS_TO_CAPTURE ), ) return middleware(environ, start_response) Flask.__call__ = sentry_patched_wsgi_app def _add_sentry_trace(sender, template, context, **extra): # type: (Flask, Any, Dict[str, Any], **Any) -> None if "sentry_trace" in context: return scope = sentry_sdk.get_current_scope() trace_meta = Markup(scope.trace_propagation_meta()) context["sentry_trace"] = trace_meta # for backwards compatibility context["sentry_trace_meta"] = trace_meta def _set_transaction_name_and_source(scope, transaction_style, request): # type: (sentry_sdk.Scope, str, Request) -> None try: name_for_style = { "url": request.url_rule.rule, "endpoint": request.url_rule.endpoint, } scope.set_transaction_name( name_for_style[transaction_style], source=SOURCE_FOR_STYLE[transaction_style], ) except Exception: pass def _request_started(app, **kwargs): # type: (Flask, **Any) -> None integration = sentry_sdk.get_client().get_integration(FlaskIntegration) if integration is None: return request = flask_request._get_current_object() # Set the transaction name and source here, # but rely on WSGI middleware to actually start the transaction _set_transaction_name_and_source( sentry_sdk.get_current_scope(), integration.transaction_style, request ) scope = sentry_sdk.get_isolation_scope() evt_processor = _make_request_event_processor(app, request, integration) scope.add_event_processor(evt_processor) class FlaskRequestExtractor(RequestExtractor): def env(self): # type: () -> Dict[str, str] return self.request.environ def cookies(self): # type: () -> Dict[Any, Any] return { k: v[0] if isinstance(v, list) and len(v) == 1 else v for k, v in self.request.cookies.items() } def raw_data(self): # type: () -> bytes return self.request.get_data() def form(self): # type: () -> ImmutableMultiDict[str, Any] return self.request.form def files(self): # type: () -> ImmutableMultiDict[str, Any] return self.request.files def is_json(self): # type: () -> bool return self.request.is_json def json(self): # type: () -> Any return self.request.get_json(silent=True) def size_of_file(self, file): # type: (FileStorage) -> int return file.content_length def _make_request_event_processor(app, request, integration): # type: (Flask, Callable[[], Request], FlaskIntegration) -> EventProcessor def inner(event, hint): # type: (Event, dict[str, 
Any]) -> Event # if the request is gone we are fine not logging the data from # it. This might happen if the processor is pushed away to # another thread. if request is None: return event with capture_internal_exceptions(): FlaskRequestExtractor(request).extract_into_event(event) if should_send_default_pii(): with capture_internal_exceptions(): _add_user_to_event(event) return event return inner @ensure_integration_enabled(FlaskIntegration) def _capture_exception(sender, exception, **kwargs): # type: (Flask, Union[ValueError, BaseException], **Any) -> None event, hint = event_from_exception( exception, client_options=sentry_sdk.get_client().options, mechanism={"type": "flask", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) def _add_user_to_event(event): # type: (Event) -> None if flask_login is None: return user = flask_login.current_user if user is None: return with capture_internal_exceptions(): # Access this object as late as possible as accessing the user # is relatively costly user_info = event.setdefault("user", {}) try: user_info.setdefault("id", user.get_id()) # TODO: more configurable user attrs here except AttributeError: # might happen if: # - flask_login could not be imported # - flask_login is not configured # - no user is logged in pass # The following attribute accesses are ineffective for the general # Flask-Login case, because the User interface of Flask-Login does not # care about anything but the ID. However, Flask-User (based on # Flask-Login) documents a few optional extra attributes. # # https://github.com/lingthio/Flask-User/blob/a379fa0a281789618c484b459cb41236779b95b1/docs/source/data_models.rst#fixed-data-model-property-names try: user_info.setdefault("email", user.email) except Exception: pass try: user_info.setdefault("username", user.username) except Exception: pass sentry-python-2.18.0/sentry_sdk/integrations/gcp.py000066400000000000000000000201361471214654000224710ustar00rootroot00000000000000import functools import sys from copy import deepcopy from datetime import datetime, timedelta, timezone from os import environ import sentry_sdk from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP from sentry_sdk.integrations import Integration from sentry_sdk.integrations._wsgi_common import _filter_headers from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT from sentry_sdk.utils import ( AnnotatedValue, capture_internal_exceptions, event_from_exception, logger, TimeoutThread, reraise, ) from typing import TYPE_CHECKING # Constants TIMEOUT_WARNING_BUFFER = 1.5 # Buffer time required to send timeout warning to Sentry MILLIS_TO_SECONDS = 1000.0 if TYPE_CHECKING: from typing import Any from typing import TypeVar from typing import Callable from typing import Optional from sentry_sdk._types import EventProcessor, Event, Hint F = TypeVar("F", bound=Callable[..., Any]) def _wrap_func(func): # type: (F) -> F @functools.wraps(func) def sentry_func(functionhandler, gcp_event, *args, **kwargs): # type: (Any, Any, *Any, **Any) -> Any client = sentry_sdk.get_client() integration = client.get_integration(GcpIntegration) if integration is None: return func(functionhandler, gcp_event, *args, **kwargs) configured_time = environ.get("FUNCTION_TIMEOUT_SEC") if not configured_time: logger.debug( "The configured timeout could not be fetched from Cloud Functions configuration." 
) return func(functionhandler, gcp_event, *args, **kwargs) configured_time = int(configured_time) initial_time = datetime.now(timezone.utc) with sentry_sdk.isolation_scope() as scope: with capture_internal_exceptions(): scope.clear_breadcrumbs() scope.add_event_processor( _make_request_event_processor( gcp_event, configured_time, initial_time ) ) scope.set_tag("gcp_region", environ.get("FUNCTION_REGION")) timeout_thread = None if ( integration.timeout_warning and configured_time > TIMEOUT_WARNING_BUFFER ): waiting_time = configured_time - TIMEOUT_WARNING_BUFFER timeout_thread = TimeoutThread(waiting_time, configured_time) # Starting the thread to raise timeout warning exception timeout_thread.start() headers = {} if hasattr(gcp_event, "headers"): headers = gcp_event.headers transaction = continue_trace( headers, op=OP.FUNCTION_GCP, name=environ.get("FUNCTION_NAME", ""), source=TRANSACTION_SOURCE_COMPONENT, origin=GcpIntegration.origin, ) sampling_context = { "gcp_env": { "function_name": environ.get("FUNCTION_NAME"), "function_entry_point": environ.get("ENTRY_POINT"), "function_identity": environ.get("FUNCTION_IDENTITY"), "function_region": environ.get("FUNCTION_REGION"), "function_project": environ.get("GCP_PROJECT"), }, "gcp_event": gcp_event, } with sentry_sdk.start_transaction( transaction, custom_sampling_context=sampling_context ): try: return func(functionhandler, gcp_event, *args, **kwargs) except Exception: exc_info = sys.exc_info() sentry_event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "gcp", "handled": False}, ) sentry_sdk.capture_event(sentry_event, hint=hint) reraise(*exc_info) finally: if timeout_thread: timeout_thread.stop() # Flush out the event queue client.flush() return sentry_func # type: ignore class GcpIntegration(Integration): identifier = "gcp" origin = f"auto.function.{identifier}" def __init__(self, timeout_warning=False): # type: (bool) -> None self.timeout_warning = timeout_warning @staticmethod def setup_once(): # type: () -> None import __main__ as gcp_functions if not hasattr(gcp_functions, "worker_v1"): logger.warning( "GcpIntegration currently supports only Python 3.7 runtime environment." 
            )
            return

        worker1 = gcp_functions.worker_v1

        worker1.FunctionHandler.invoke_user_function = _wrap_func(
            worker1.FunctionHandler.invoke_user_function
        )


def _make_request_event_processor(gcp_event, configured_timeout, initial_time):
    # type: (Any, Any, Any) -> EventProcessor

    def event_processor(event, hint):
        # type: (Event, Hint) -> Optional[Event]

        final_time = datetime.now(timezone.utc)
        time_diff = final_time - initial_time

        execution_duration_in_millis = time_diff / timedelta(milliseconds=1)

        extra = event.setdefault("extra", {})
        extra["google cloud functions"] = {
            "function_name": environ.get("FUNCTION_NAME"),
            "function_entry_point": environ.get("ENTRY_POINT"),
            "function_identity": environ.get("FUNCTION_IDENTITY"),
            "function_region": environ.get("FUNCTION_REGION"),
            "function_project": environ.get("GCP_PROJECT"),
            "execution_duration_in_millis": execution_duration_in_millis,
            "configured_timeout_in_seconds": configured_timeout,
        }

        extra["google cloud logs"] = {
            "url": _get_google_cloud_logs_url(final_time),
        }

        request = event.get("request", {})

        request["url"] = "gcp:///{}".format(environ.get("FUNCTION_NAME"))

        if hasattr(gcp_event, "method"):
            request["method"] = gcp_event.method

        if hasattr(gcp_event, "query_string"):
            request["query_string"] = gcp_event.query_string.decode("utf-8")

        if hasattr(gcp_event, "headers"):
            request["headers"] = _filter_headers(gcp_event.headers)

        if should_send_default_pii():
            if hasattr(gcp_event, "data"):
                request["data"] = gcp_event.data
        else:
            if hasattr(gcp_event, "data"):
                # Unfortunately couldn't find a way to get structured body from GCP
                # event. Meaning every body is unstructured to us.
                request["data"] = AnnotatedValue.removed_because_raw_data()

        event["request"] = deepcopy(request)

        return event

    return event_processor


def _get_google_cloud_logs_url(final_time):
    # type: (datetime) -> str
    """
    Generates a Google Cloud Logs console URL based on the environment variables
    Arguments:
        final_time {datetime} -- Final time
    Returns:
        str -- Google Cloud Logs Console URL to logs.
    """
    hour_ago = final_time - timedelta(hours=1)
    formatstring = "%Y-%m-%dT%H:%M:%SZ"

    url = (
        "https://console.cloud.google.com/logs/viewer?project={project}&resource=cloud_function"
        "%2Ffunction_name%2F{function_name}%2Fregion%2F{region}&minLogLevel=0&expandAll=false"
        "&timestamp={timestamp_end}&customFacets=&limitCustomFacetWidth=true"
        "&dateRangeStart={timestamp_start}&dateRangeEnd={timestamp_end}"
        "&interval=PT1H&scrollTimestamp={timestamp_end}"
    ).format(
        project=environ.get("GCP_PROJECT"),
        function_name=environ.get("FUNCTION_NAME"),
        region=environ.get("FUNCTION_REGION"),
        timestamp_end=final_time.strftime(formatstring),
        timestamp_start=hour_ago.strftime(formatstring),
    )

    return url
sentry-python-2.18.0/sentry_sdk/integrations/gnu_backtrace.py000066400000000000000000000055161471214654000245150ustar00rootroot00000000000000import re

import sentry_sdk
from sentry_sdk.integrations import Integration
from sentry_sdk.scope import add_global_event_processor
from sentry_sdk.utils import capture_internal_exceptions

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Any

    from sentry_sdk._types import Event


MODULE_RE = r"[a-zA-Z0-9/._:\\-]+"
TYPE_RE = r"[a-zA-Z0-9._:<>,-]+"
HEXVAL_RE = r"[A-Fa-f0-9]+"


FRAME_RE = r"""
^(?P<index>\d+)\.\s
(?P<package>{MODULE_RE})\(
  (?P<retval>{TYPE_RE}\ )?
  ((?P<function>{TYPE_RE})
    (?P<args>\(.*\))?
  )?
  ((?P<constoffset>\ const)?\+0x(?P<offset>{HEXVAL_RE}))?
\)\s
\[0x(?P<retaddr>{HEXVAL_RE})\]$
""".format(
    MODULE_RE=MODULE_RE, HEXVAL_RE=HEXVAL_RE, TYPE_RE=TYPE_RE
)

FRAME_RE = re.compile(FRAME_RE, re.MULTILINE | re.VERBOSE)


class GnuBacktraceIntegration(Integration):
    identifier = "gnu_backtrace"

    @staticmethod
    def setup_once():
        # type: () -> None
        @add_global_event_processor
        def process_gnu_backtrace(event, hint):
            # type: (Event, dict[str, Any]) -> Event
            with capture_internal_exceptions():
                return _process_gnu_backtrace(event, hint)


def _process_gnu_backtrace(event, hint):
    # type: (Event, dict[str, Any]) -> Event
    if sentry_sdk.get_client().get_integration(GnuBacktraceIntegration) is None:
        return event

    exc_info = hint.get("exc_info", None)

    if exc_info is None:
        return event

    exception = event.get("exception", None)

    if exception is None:
        return event

    values = exception.get("values", None)

    if values is None:
        return event

    for exception in values:
        frames = exception.get("stacktrace", {}).get("frames", [])
        if not frames:
            continue

        msg = exception.get("value", None)
        if not msg:
            continue

        additional_frames = []
        new_msg = []

        for line in msg.splitlines():
            match = FRAME_RE.match(line)
            if match:
                additional_frames.append(
                    (
                        int(match.group("index")),
                        {
                            "package": match.group("package") or None,
                            "function": match.group("function") or None,
                            "platform": "native",
                        },
                    )
                )
            else:
                # Put garbage lines back into message, not sure what to do with them.
                new_msg.append(line)

        if additional_frames:
            additional_frames.sort(key=lambda x: -x[0])
            for _, frame in additional_frames:
                frames.append(frame)

            new_msg.append("<stacktrace parsed and removed by GnuBacktraceIntegration>")
            exception["value"] = "\n".join(new_msg)

    return event
sentry-python-2.18.0/sentry_sdk/integrations/gql.py000066400000000000000000000104251471214654000225030ustar00rootroot00000000000000import sentry_sdk
from sentry_sdk.utils import (
    event_from_exception,
    ensure_integration_enabled,
    parse_version,
)
from sentry_sdk.integrations import DidNotEnable, Integration
from sentry_sdk.scope import should_send_default_pii

try:
    import gql  # type: ignore[import-not-found]
    from graphql import print_ast, get_operation_ast, DocumentNode, VariableDefinitionNode  # type: ignore[import-not-found]
    from gql.transport import Transport, AsyncTransport  # type: ignore[import-not-found]
    from gql.transport.exceptions import TransportQueryError  # type: ignore[import-not-found]
except ImportError:
    raise DidNotEnable("gql is not installed")

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Any, Dict, Tuple, Union
    from sentry_sdk._types import Event, EventProcessor

    EventDataType = Dict[str, Union[str, Tuple[VariableDefinitionNode, ...]]]

MIN_GQL_VERSION = (3, 4, 1)


class GQLIntegration(Integration):
    identifier = "gql"

    @staticmethod
    def setup_once():
        # type: () -> None
        gql_version = parse_version(gql.__version__)
        if gql_version is None or gql_version < MIN_GQL_VERSION:
            raise DidNotEnable(
                "GQLIntegration is only supported for GQL versions %s and above."
% ".".join(str(num) for num in MIN_GQL_VERSION) ) _patch_execute() def _data_from_document(document): # type: (DocumentNode) -> EventDataType try: operation_ast = get_operation_ast(document) data = {"query": print_ast(document)} # type: EventDataType if operation_ast is not None: data["variables"] = operation_ast.variable_definitions if operation_ast.name is not None: data["operationName"] = operation_ast.name.value return data except (AttributeError, TypeError): return dict() def _transport_method(transport): # type: (Union[Transport, AsyncTransport]) -> str """ The RequestsHTTPTransport allows defining the HTTP method; all other transports use POST. """ try: return transport.method except AttributeError: return "POST" def _request_info_from_transport(transport): # type: (Union[Transport, AsyncTransport, None]) -> Dict[str, str] if transport is None: return {} request_info = { "method": _transport_method(transport), } try: request_info["url"] = transport.url except AttributeError: pass return request_info def _patch_execute(): # type: () -> None real_execute = gql.Client.execute @ensure_integration_enabled(GQLIntegration, real_execute) def sentry_patched_execute(self, document, *args, **kwargs): # type: (gql.Client, DocumentNode, Any, Any) -> Any scope = sentry_sdk.get_isolation_scope() scope.add_event_processor(_make_gql_event_processor(self, document)) try: return real_execute(self, document, *args, **kwargs) except TransportQueryError as e: event, hint = event_from_exception( e, client_options=sentry_sdk.get_client().options, mechanism={"type": "gql", "handled": False}, ) sentry_sdk.capture_event(event, hint) raise e gql.Client.execute = sentry_patched_execute def _make_gql_event_processor(client, document): # type: (gql.Client, DocumentNode) -> EventProcessor def processor(event, hint): # type: (Event, dict[str, Any]) -> Event try: errors = hint["exc_info"][1].errors except (AttributeError, KeyError): errors = None request = event.setdefault("request", {}) request.update( { "api_target": "graphql", **_request_info_from_transport(client.transport), } ) if should_send_default_pii(): request["data"] = _data_from_document(document) contexts = event.setdefault("contexts", {}) response = contexts.setdefault("response", {}) response.update( { "data": {"errors": errors}, "type": response, } ) return event return processor sentry-python-2.18.0/sentry_sdk/integrations/graphene.py000066400000000000000000000120711471214654000235100ustar00rootroot00000000000000from contextlib import contextmanager import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.scope import should_send_default_pii from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, event_from_exception, package_version, ) try: from graphene.types import schema as graphene_schema # type: ignore except ImportError: raise DidNotEnable("graphene is not installed") from typing import TYPE_CHECKING if TYPE_CHECKING: from collections.abc import Generator from typing import Any, Dict, Union from graphene.language.source import Source # type: ignore from graphql.execution import ExecutionResult # type: ignore from graphql.type import GraphQLSchema # type: ignore from sentry_sdk._types import Event class GrapheneIntegration(Integration): identifier = "graphene" @staticmethod def setup_once(): # type: () -> None version = package_version("graphene") if version is None: raise DidNotEnable("Unparsable graphene version.") if version < (3, 3): 
raise DidNotEnable("graphene 3.3 or newer required.") _patch_graphql() def _patch_graphql(): # type: () -> None old_graphql_sync = graphene_schema.graphql_sync old_graphql_async = graphene_schema.graphql @ensure_integration_enabled(GrapheneIntegration, old_graphql_sync) def _sentry_patched_graphql_sync(schema, source, *args, **kwargs): # type: (GraphQLSchema, Union[str, Source], Any, Any) -> ExecutionResult scope = sentry_sdk.get_isolation_scope() scope.add_event_processor(_event_processor) with graphql_span(schema, source, kwargs): result = old_graphql_sync(schema, source, *args, **kwargs) with capture_internal_exceptions(): client = sentry_sdk.get_client() for error in result.errors or []: event, hint = event_from_exception( error, client_options=client.options, mechanism={ "type": GrapheneIntegration.identifier, "handled": False, }, ) sentry_sdk.capture_event(event, hint=hint) return result async def _sentry_patched_graphql_async(schema, source, *args, **kwargs): # type: (GraphQLSchema, Union[str, Source], Any, Any) -> ExecutionResult integration = sentry_sdk.get_client().get_integration(GrapheneIntegration) if integration is None: return await old_graphql_async(schema, source, *args, **kwargs) scope = sentry_sdk.get_isolation_scope() scope.add_event_processor(_event_processor) with graphql_span(schema, source, kwargs): result = await old_graphql_async(schema, source, *args, **kwargs) with capture_internal_exceptions(): client = sentry_sdk.get_client() for error in result.errors or []: event, hint = event_from_exception( error, client_options=client.options, mechanism={ "type": GrapheneIntegration.identifier, "handled": False, }, ) sentry_sdk.capture_event(event, hint=hint) return result graphene_schema.graphql_sync = _sentry_patched_graphql_sync graphene_schema.graphql = _sentry_patched_graphql_async def _event_processor(event, hint): # type: (Event, Dict[str, Any]) -> Event if should_send_default_pii(): request_info = event.setdefault("request", {}) request_info["api_target"] = "graphql" elif event.get("request", {}).get("data"): del event["request"]["data"] return event @contextmanager def graphql_span(schema, source, kwargs): # type: (GraphQLSchema, Union[str, Source], Dict[str, Any]) -> Generator[None, None, None] operation_name = kwargs.get("operation_name") operation_type = "query" op = OP.GRAPHQL_QUERY if source.strip().startswith("mutation"): operation_type = "mutation" op = OP.GRAPHQL_MUTATION elif source.strip().startswith("subscription"): operation_type = "subscription" op = OP.GRAPHQL_SUBSCRIPTION sentry_sdk.add_breadcrumb( crumb={ "data": { "operation_name": operation_name, "operation_type": operation_type, }, "category": "graphql.operation", }, ) scope = sentry_sdk.get_current_scope() if scope.span: _graphql_span = scope.span.start_child(op=op, name=operation_name) else: _graphql_span = sentry_sdk.start_span(op=op, name=operation_name) _graphql_span.set_data("graphql.document", source) _graphql_span.set_data("graphql.operation.name", operation_name) _graphql_span.set_data("graphql.operation.type", operation_type) try: yield finally: _graphql_span.finish() sentry-python-2.18.0/sentry_sdk/integrations/grpc/000077500000000000000000000000001471214654000222775ustar00rootroot00000000000000sentry-python-2.18.0/sentry_sdk/integrations/grpc/__init__.py000066400000000000000000000115261471214654000244150ustar00rootroot00000000000000from functools import wraps import grpc from grpc import Channel, Server, intercept_channel from grpc.aio import Channel as AsyncChannel from grpc.aio 
import Server as AsyncServer from sentry_sdk.integrations import Integration from .client import ClientInterceptor from .server import ServerInterceptor from .aio.server import ServerInterceptor as AsyncServerInterceptor from .aio.client import ( SentryUnaryUnaryClientInterceptor as AsyncUnaryUnaryClientInterceptor, ) from .aio.client import ( SentryUnaryStreamClientInterceptor as AsyncUnaryStreamClientIntercetor, ) from typing import TYPE_CHECKING, Any, Optional, Sequence # Hack to get new Python features working in older versions # without introducing a hard dependency on `typing_extensions` # from: https://stackoverflow.com/a/71944042/300572 if TYPE_CHECKING: from typing import ParamSpec, Callable else: # Fake ParamSpec class ParamSpec: def __init__(self, _): self.args = None self.kwargs = None # Callable[anything] will return None class _Callable: def __getitem__(self, _): return None # Make instances Callable = _Callable() P = ParamSpec("P") def _wrap_channel_sync(func: Callable[P, Channel]) -> Callable[P, Channel]: "Wrapper for synchronous secure and insecure channel." @wraps(func) def patched_channel(*args: Any, **kwargs: Any) -> Channel: channel = func(*args, **kwargs) if not ClientInterceptor._is_intercepted: ClientInterceptor._is_intercepted = True return intercept_channel(channel, ClientInterceptor()) else: return channel return patched_channel def _wrap_intercept_channel(func: Callable[P, Channel]) -> Callable[P, Channel]: @wraps(func) def patched_intercept_channel( channel: Channel, *interceptors: grpc.ServerInterceptor ) -> Channel: if ClientInterceptor._is_intercepted: interceptors = tuple( [ interceptor for interceptor in interceptors if not isinstance(interceptor, ClientInterceptor) ] ) else: interceptors = interceptors return intercept_channel(channel, *interceptors) return patched_intercept_channel # type: ignore def _wrap_channel_async(func: Callable[P, AsyncChannel]) -> Callable[P, AsyncChannel]: "Wrapper for asynchronous secure and insecure channel." 
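    # A hedged editor's note on the pattern below: grpc.aio (unlike the sync
    # API above) exposes no intercept_channel() helper, so the async wrapper
    # injects the Sentry client interceptors through the `interceptors`
    # keyword that grpc.aio.insecure_channel()/secure_channel() accept at
    # construction time, prepending them ahead of any user-supplied ones.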
@wraps(func) def patched_channel( *args: P.args, interceptors: Optional[Sequence[grpc.aio.ClientInterceptor]] = None, **kwargs: P.kwargs, ) -> Channel: sentry_interceptors = [ AsyncUnaryUnaryClientInterceptor(), AsyncUnaryStreamClientIntercetor(), ] interceptors = [*sentry_interceptors, *(interceptors or [])] return func(*args, interceptors=interceptors, **kwargs) # type: ignore return patched_channel # type: ignore def _wrap_sync_server(func: Callable[P, Server]) -> Callable[P, Server]: """Wrapper for synchronous server.""" @wraps(func) def patched_server( *args: P.args, interceptors: Optional[Sequence[grpc.ServerInterceptor]] = None, **kwargs: P.kwargs, ) -> Server: interceptors = [ interceptor for interceptor in interceptors or [] if not isinstance(interceptor, ServerInterceptor) ] server_interceptor = ServerInterceptor() interceptors = [server_interceptor, *(interceptors or [])] return func(*args, interceptors=interceptors, **kwargs) # type: ignore return patched_server # type: ignore def _wrap_async_server(func: Callable[P, AsyncServer]) -> Callable[P, AsyncServer]: """Wrapper for asynchronous server.""" @wraps(func) def patched_aio_server( *args: P.args, interceptors: Optional[Sequence[grpc.ServerInterceptor]] = None, **kwargs: P.kwargs, ) -> Server: server_interceptor = AsyncServerInterceptor() interceptors = (server_interceptor, *(interceptors or [])) return func(*args, interceptors=interceptors, **kwargs) # type: ignore return patched_aio_server # type: ignore class GRPCIntegration(Integration): identifier = "grpc" @staticmethod def setup_once() -> None: import grpc grpc.insecure_channel = _wrap_channel_sync(grpc.insecure_channel) grpc.secure_channel = _wrap_channel_sync(grpc.secure_channel) grpc.intercept_channel = _wrap_intercept_channel(grpc.intercept_channel) grpc.aio.insecure_channel = _wrap_channel_async(grpc.aio.insecure_channel) grpc.aio.secure_channel = _wrap_channel_async(grpc.aio.secure_channel) grpc.server = _wrap_sync_server(grpc.server) grpc.aio.server = _wrap_async_server(grpc.aio.server) sentry-python-2.18.0/sentry_sdk/integrations/grpc/aio/000077500000000000000000000000001471214654000230475ustar00rootroot00000000000000sentry-python-2.18.0/sentry_sdk/integrations/grpc/aio/__init__.py000066400000000000000000000002151471214654000251560ustar00rootroot00000000000000from .server import ServerInterceptor from .client import ClientInterceptor __all__ = [ "ClientInterceptor", "ServerInterceptor", ] sentry-python-2.18.0/sentry_sdk/integrations/grpc/aio/client.py000066400000000000000000000062221471214654000247010ustar00rootroot00000000000000from typing import Callable, Union, AsyncIterable, Any from grpc.aio import ( UnaryUnaryClientInterceptor, UnaryStreamClientInterceptor, ClientCallDetails, UnaryUnaryCall, UnaryStreamCall, ) from google.protobuf.message import Message import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.integrations.grpc.consts import SPAN_ORIGIN class ClientInterceptor: @staticmethod def _update_client_call_details_metadata_from_scope( client_call_details: ClientCallDetails, ) -> ClientCallDetails: metadata = ( list(client_call_details.metadata) if client_call_details.metadata else [] ) for ( key, value, ) in sentry_sdk.get_current_scope().iter_trace_propagation_headers(): metadata.append((key, value)) client_call_details = ClientCallDetails( method=client_call_details.method, timeout=client_call_details.timeout, metadata=metadata, credentials=client_call_details.credentials, wait_for_ready=client_call_details.wait_for_ready, ) return 
client_call_details class SentryUnaryUnaryClientInterceptor(ClientInterceptor, UnaryUnaryClientInterceptor): # type: ignore async def intercept_unary_unary( self, continuation: Callable[[ClientCallDetails, Message], UnaryUnaryCall], client_call_details: ClientCallDetails, request: Message, ) -> Union[UnaryUnaryCall, Message]: method = client_call_details.method with sentry_sdk.start_span( op=OP.GRPC_CLIENT, name="unary unary call to %s" % method.decode(), origin=SPAN_ORIGIN, ) as span: span.set_data("type", "unary unary") span.set_data("method", method) client_call_details = self._update_client_call_details_metadata_from_scope( client_call_details ) response = await continuation(client_call_details, request) status_code = await response.code() span.set_data("code", status_code.name) return response class SentryUnaryStreamClientInterceptor( ClientInterceptor, UnaryStreamClientInterceptor # type: ignore ): async def intercept_unary_stream( self, continuation: Callable[[ClientCallDetails, Message], UnaryStreamCall], client_call_details: ClientCallDetails, request: Message, ) -> Union[AsyncIterable[Any], UnaryStreamCall]: method = client_call_details.method with sentry_sdk.start_span( op=OP.GRPC_CLIENT, name="unary stream call to %s" % method.decode(), origin=SPAN_ORIGIN, ) as span: span.set_data("type", "unary stream") span.set_data("method", method) client_call_details = self._update_client_call_details_metadata_from_scope( client_call_details ) response = await continuation(client_call_details, request) # status_code = await response.code() # span.set_data("code", status_code) return response sentry-python-2.18.0/sentry_sdk/integrations/grpc/aio/server.py000066400000000000000000000076741471214654000247450ustar00rootroot00000000000000import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable from sentry_sdk.integrations.grpc.consts import SPAN_ORIGIN from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_CUSTOM from sentry_sdk.utils import event_from_exception from typing import TYPE_CHECKING if TYPE_CHECKING: from collections.abc import Awaitable, Callable from typing import Any, Optional try: import grpc from grpc import HandlerCallDetails, RpcMethodHandler from grpc.aio import AbortError, ServicerContext except ImportError: raise DidNotEnable("grpcio is not installed") class ServerInterceptor(grpc.aio.ServerInterceptor): # type: ignore def __init__(self, find_name=None): # type: (ServerInterceptor, Callable[[ServicerContext], str] | None) -> None self._find_method_name = find_name or self._find_name super().__init__() async def intercept_service(self, continuation, handler_call_details): # type: (ServerInterceptor, Callable[[HandlerCallDetails], Awaitable[RpcMethodHandler]], HandlerCallDetails) -> Optional[Awaitable[RpcMethodHandler]] self._handler_call_details = handler_call_details handler = await continuation(handler_call_details) if handler is None: return None if not handler.request_streaming and not handler.response_streaming: handler_factory = grpc.unary_unary_rpc_method_handler async def wrapped(request, context): # type: (Any, ServicerContext) -> Any name = self._find_method_name(context) if not name: return await handler(request, context) # What if the headers are empty? 
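                # Hedged editor's note: continue_from_headers() tolerates empty
                # metadata; if no incoming `sentry-trace` header is present it
                # simply starts a fresh trace for this transaction rather than
                # raising, so the question above is benign in practice.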
transaction = Transaction.continue_from_headers( dict(context.invocation_metadata()), op=OP.GRPC_SERVER, name=name, source=TRANSACTION_SOURCE_CUSTOM, origin=SPAN_ORIGIN, ) with sentry_sdk.start_transaction(transaction=transaction): try: return await handler.unary_unary(request, context) except AbortError: raise except Exception as exc: event, hint = event_from_exception( exc, mechanism={"type": "grpc", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) raise elif not handler.request_streaming and handler.response_streaming: handler_factory = grpc.unary_stream_rpc_method_handler async def wrapped(request, context): # type: ignore # type: (Any, ServicerContext) -> Any async for r in handler.unary_stream(request, context): yield r elif handler.request_streaming and not handler.response_streaming: handler_factory = grpc.stream_unary_rpc_method_handler async def wrapped(request, context): # type: (Any, ServicerContext) -> Any response = handler.stream_unary(request, context) return await response elif handler.request_streaming and handler.response_streaming: handler_factory = grpc.stream_stream_rpc_method_handler async def wrapped(request, context): # type: ignore # type: (Any, ServicerContext) -> Any async for r in handler.stream_stream(request, context): yield r return handler_factory( wrapped, request_deserializer=handler.request_deserializer, response_serializer=handler.response_serializer, ) def _find_name(self, context): # type: (ServicerContext) -> str return self._handler_call_details.method sentry-python-2.18.0/sentry_sdk/integrations/grpc/client.py000066400000000000000000000064661471214654000241430ustar00rootroot00000000000000import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable from sentry_sdk.integrations.grpc.consts import SPAN_ORIGIN from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any, Callable, Iterator, Iterable, Union try: import grpc from grpc import ClientCallDetails, Call from grpc._interceptor import _UnaryOutcome from grpc.aio._interceptor import UnaryStreamCall from google.protobuf.message import Message except ImportError: raise DidNotEnable("grpcio is not installed") class ClientInterceptor( grpc.UnaryUnaryClientInterceptor, grpc.UnaryStreamClientInterceptor # type: ignore ): _is_intercepted = False def intercept_unary_unary(self, continuation, client_call_details, request): # type: (ClientInterceptor, Callable[[ClientCallDetails, Message], _UnaryOutcome], ClientCallDetails, Message) -> _UnaryOutcome method = client_call_details.method with sentry_sdk.start_span( op=OP.GRPC_CLIENT, name="unary unary call to %s" % method, origin=SPAN_ORIGIN, ) as span: span.set_data("type", "unary unary") span.set_data("method", method) client_call_details = self._update_client_call_details_metadata_from_scope( client_call_details ) response = continuation(client_call_details, request) span.set_data("code", response.code().name) return response def intercept_unary_stream(self, continuation, client_call_details, request): # type: (ClientInterceptor, Callable[[ClientCallDetails, Message], Union[Iterable[Any], UnaryStreamCall]], ClientCallDetails, Message) -> Union[Iterator[Message], Call] method = client_call_details.method with sentry_sdk.start_span( op=OP.GRPC_CLIENT, name="unary stream call to %s" % method, origin=SPAN_ORIGIN, ) as span: span.set_data("type", "unary stream") span.set_data("method", method) client_call_details = self._update_client_call_details_metadata_from_scope( client_call_details ) 
response = continuation( client_call_details, request ) # type: UnaryStreamCall # Setting code on unary-stream leads to execution getting stuck # span.set_data("code", response.code().name) return response @staticmethod def _update_client_call_details_metadata_from_scope(client_call_details): # type: (ClientCallDetails) -> ClientCallDetails metadata = ( list(client_call_details.metadata) if client_call_details.metadata else [] ) for ( key, value, ) in sentry_sdk.get_current_scope().iter_trace_propagation_headers(): metadata.append((key, value)) client_call_details = grpc._interceptor._ClientCallDetails( method=client_call_details.method, timeout=client_call_details.timeout, metadata=metadata, credentials=client_call_details.credentials, wait_for_ready=client_call_details.wait_for_ready, compression=client_call_details.compression, ) return client_call_details sentry-python-2.18.0/sentry_sdk/integrations/grpc/consts.py000066400000000000000000000000371471214654000241620ustar00rootroot00000000000000SPAN_ORIGIN = "auto.grpc.grpc" sentry-python-2.18.0/sentry_sdk/integrations/grpc/server.py000066400000000000000000000046631471214654000241700ustar00rootroot00000000000000import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable from sentry_sdk.integrations.grpc.consts import SPAN_ORIGIN from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_CUSTOM from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Callable, Optional from google.protobuf.message import Message try: import grpc from grpc import ServicerContext, HandlerCallDetails, RpcMethodHandler except ImportError: raise DidNotEnable("grpcio is not installed") class ServerInterceptor(grpc.ServerInterceptor): # type: ignore def __init__(self, find_name=None): # type: (ServerInterceptor, Optional[Callable[[ServicerContext], str]]) -> None self._find_method_name = find_name or ServerInterceptor._find_name super().__init__() def intercept_service(self, continuation, handler_call_details): # type: (ServerInterceptor, Callable[[HandlerCallDetails], RpcMethodHandler], HandlerCallDetails) -> RpcMethodHandler handler = continuation(handler_call_details) if not handler or not handler.unary_unary: return handler def behavior(request, context): # type: (Message, ServicerContext) -> Message with sentry_sdk.isolation_scope(): name = self._find_method_name(context) if name: metadata = dict(context.invocation_metadata()) transaction = Transaction.continue_from_headers( metadata, op=OP.GRPC_SERVER, name=name, source=TRANSACTION_SOURCE_CUSTOM, origin=SPAN_ORIGIN, ) with sentry_sdk.start_transaction(transaction=transaction): try: return handler.unary_unary(request, context) except BaseException as e: raise e else: return handler.unary_unary(request, context) return grpc.unary_unary_rpc_method_handler( behavior, request_deserializer=handler.request_deserializer, response_serializer=handler.response_serializer, ) @staticmethod def _find_name(context): # type: (ServicerContext) -> str return context._rpc_event.call_details.method.decode() sentry-python-2.18.0/sentry_sdk/integrations/httpx.py000066400000000000000000000122341471214654000230670ustar00rootroot00000000000000import sentry_sdk from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.tracing import BAGGAGE_HEADER_NAME from sentry_sdk.tracing_utils import should_propagate_trace from sentry_sdk.utils import ( SENSITIVE_DATA_SUBSTITUTE, capture_internal_exceptions, 
ensure_integration_enabled, logger, parse_url, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any try: from httpx import AsyncClient, Client, Request, Response # type: ignore except ImportError: raise DidNotEnable("httpx is not installed") __all__ = ["HttpxIntegration"] class HttpxIntegration(Integration): identifier = "httpx" origin = f"auto.http.{identifier}" @staticmethod def setup_once(): # type: () -> None """ httpx has its own transport layer and can be customized when needed, so patch Client.send and AsyncClient.send to support both synchronous and async interfaces. """ _install_httpx_client() _install_httpx_async_client() def _install_httpx_client(): # type: () -> None real_send = Client.send @ensure_integration_enabled(HttpxIntegration, real_send) def send(self, request, **kwargs): # type: (Client, Request, **Any) -> Response parsed_url = None with capture_internal_exceptions(): parsed_url = parse_url(str(request.url), sanitize=False) with sentry_sdk.start_span( op=OP.HTTP_CLIENT, name="%s %s" % ( request.method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE, ), origin=HttpxIntegration.origin, ) as span: span.set_data(SPANDATA.HTTP_METHOD, request.method) if parsed_url is not None: span.set_data("url", parsed_url.url) span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) if should_propagate_trace(sentry_sdk.get_client(), str(request.url)): for ( key, value, ) in sentry_sdk.get_current_scope().iter_trace_propagation_headers(): logger.debug( "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format( key=key, value=value, url=request.url ) ) if key == BAGGAGE_HEADER_NAME and request.headers.get( BAGGAGE_HEADER_NAME ): # do not overwrite any existing baggage, just append to it request.headers[key] += "," + value else: request.headers[key] = value rv = real_send(self, request, **kwargs) span.set_http_status(rv.status_code) span.set_data("reason", rv.reason_phrase) return rv Client.send = send def _install_httpx_async_client(): # type: () -> None real_send = AsyncClient.send async def send(self, request, **kwargs): # type: (AsyncClient, Request, **Any) -> Response if sentry_sdk.get_client().get_integration(HttpxIntegration) is None: return await real_send(self, request, **kwargs) parsed_url = None with capture_internal_exceptions(): parsed_url = parse_url(str(request.url), sanitize=False) with sentry_sdk.start_span( op=OP.HTTP_CLIENT, name="%s %s" % ( request.method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE, ), origin=HttpxIntegration.origin, ) as span: span.set_data(SPANDATA.HTTP_METHOD, request.method) if parsed_url is not None: span.set_data("url", parsed_url.url) span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) if should_propagate_trace(sentry_sdk.get_client(), str(request.url)): for ( key, value, ) in sentry_sdk.get_current_scope().iter_trace_propagation_headers(): logger.debug( "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format( key=key, value=value, url=request.url ) ) if key == BAGGAGE_HEADER_NAME and request.headers.get( BAGGAGE_HEADER_NAME ): # do not overwrite any existing baggage, just append to it request.headers[key] += "," + value else: request.headers[key] = value rv = await real_send(self, request, **kwargs) span.set_http_status(rv.status_code) span.set_data("reason", rv.reason_phrase) return rv AsyncClient.send = send 
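# Usage sketch (hedged editor's addition; the DSN below is a placeholder).
# HttpxIntegration is typically enabled automatically when httpx is
# installed, but passing it explicitly also works:
#
#     import httpx
#     import sentry_sdk
#     from sentry_sdk.integrations.httpx import HttpxIntegration
#
#     sentry_sdk.init(
#         dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
#         integrations=[HttpxIntegration()],
#         traces_sample_rate=1.0,  # sample transactions so client spans are kept
#     )
#
#     with sentry_sdk.start_transaction(op="task", name="demo"):
#         httpx.Client().get("https://example.com")  # recorded as an http.client span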
sentry-python-2.18.0/sentry_sdk/integrations/huey.py000066400000000000000000000125121471214654000226710ustar00rootroot00000000000000import sys from datetime import datetime import sentry_sdk from sentry_sdk.api import continue_trace, get_baggage, get_traceparent from sentry_sdk.consts import OP, SPANSTATUS from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import ( BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME, TRANSACTION_SOURCE_TASK, ) from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, event_from_exception, SENSITIVE_DATA_SUBSTITUTE, reraise, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any, Callable, Optional, Union, TypeVar from sentry_sdk._types import EventProcessor, Event, Hint from sentry_sdk.utils import ExcInfo F = TypeVar("F", bound=Callable[..., Any]) try: from huey.api import Huey, Result, ResultGroup, Task, PeriodicTask from huey.exceptions import CancelExecution, RetryTask, TaskLockedException except ImportError: raise DidNotEnable("Huey is not installed") HUEY_CONTROL_FLOW_EXCEPTIONS = (CancelExecution, RetryTask, TaskLockedException) class HueyIntegration(Integration): identifier = "huey" origin = f"auto.queue.{identifier}" @staticmethod def setup_once(): # type: () -> None patch_enqueue() patch_execute() def patch_enqueue(): # type: () -> None old_enqueue = Huey.enqueue @ensure_integration_enabled(HueyIntegration, old_enqueue) def _sentry_enqueue(self, task): # type: (Huey, Task) -> Optional[Union[Result, ResultGroup]] with sentry_sdk.start_span( op=OP.QUEUE_SUBMIT_HUEY, name=task.name, origin=HueyIntegration.origin, ): if not isinstance(task, PeriodicTask): # Attach trace propagation data to task kwargs. We do # not do this for periodic tasks, as these don't # really have an originating transaction. 
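                # Illustrative shape of the injected kwarg (the values below
                # are made up; BAGGAGE_HEADER_NAME is "baggage" and
                # SENTRY_TRACE_HEADER_NAME is "sentry-trace").
                # _sentry_execute() further down pops "sentry_headers" back
                # off before the task body runs:
                #
                #     task.kwargs["sentry_headers"] == {
                #         "baggage": "sentry-environment=production,sentry-trace_id=771a43a4192642f0b136d5159a501700",
                #         "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1",
                #     }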
task.kwargs["sentry_headers"] = { BAGGAGE_HEADER_NAME: get_baggage(), SENTRY_TRACE_HEADER_NAME: get_traceparent(), } return old_enqueue(self, task) Huey.enqueue = _sentry_enqueue def _make_event_processor(task): # type: (Any) -> EventProcessor def event_processor(event, hint): # type: (Event, Hint) -> Optional[Event] with capture_internal_exceptions(): tags = event.setdefault("tags", {}) tags["huey_task_id"] = task.id tags["huey_task_retry"] = task.default_retries > task.retries extra = event.setdefault("extra", {}) extra["huey-job"] = { "task": task.name, "args": ( task.args if should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE ), "kwargs": ( task.kwargs if should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE ), "retry": (task.default_retries or 0) - task.retries, } return event return event_processor def _capture_exception(exc_info): # type: (ExcInfo) -> None scope = sentry_sdk.get_current_scope() if exc_info[0] in HUEY_CONTROL_FLOW_EXCEPTIONS: scope.transaction.set_status(SPANSTATUS.ABORTED) return scope.transaction.set_status(SPANSTATUS.INTERNAL_ERROR) event, hint = event_from_exception( exc_info, client_options=sentry_sdk.get_client().options, mechanism={"type": HueyIntegration.identifier, "handled": False}, ) scope.capture_event(event, hint=hint) def _wrap_task_execute(func): # type: (F) -> F @ensure_integration_enabled(HueyIntegration, func) def _sentry_execute(*args, **kwargs): # type: (*Any, **Any) -> Any try: result = func(*args, **kwargs) except Exception: exc_info = sys.exc_info() _capture_exception(exc_info) reraise(*exc_info) return result return _sentry_execute # type: ignore def patch_execute(): # type: () -> None old_execute = Huey._execute @ensure_integration_enabled(HueyIntegration, old_execute) def _sentry_execute(self, task, timestamp=None): # type: (Huey, Task, Optional[datetime]) -> Any with sentry_sdk.isolation_scope() as scope: with capture_internal_exceptions(): scope._name = "huey" scope.clear_breadcrumbs() scope.add_event_processor(_make_event_processor(task)) sentry_headers = task.kwargs.pop("sentry_headers", None) transaction = continue_trace( sentry_headers or {}, name=task.name, op=OP.QUEUE_TASK_HUEY, source=TRANSACTION_SOURCE_TASK, origin=HueyIntegration.origin, ) transaction.set_status(SPANSTATUS.OK) if not getattr(task, "_sentry_is_patched", False): task.execute = _wrap_task_execute(task.execute) task._sentry_is_patched = True with sentry_sdk.start_transaction(transaction): return old_execute(self, task, timestamp) Huey._execute = _sentry_execute sentry-python-2.18.0/sentry_sdk/integrations/huggingface_hub.py000066400000000000000000000146111471214654000250260ustar00rootroot00000000000000from functools import wraps from sentry_sdk import consts from sentry_sdk.ai.monitoring import record_token_usage from sentry_sdk.ai.utils import set_data_normalized from sentry_sdk.consts import SPANDATA from typing import Any, Iterable, Callable import sentry_sdk from sentry_sdk.scope import should_send_default_pii from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, ) try: import huggingface_hub.inference._client from huggingface_hub import ChatCompletionStreamOutput, TextGenerationOutput except ImportError: raise DidNotEnable("Huggingface not installed") class HuggingfaceHubIntegration(Integration): identifier = "huggingface_hub" origin = f"auto.ai.{identifier}" def __init__(self, include_prompts=True): # type: (HuggingfaceHubIntegration, bool) -> None 
self.include_prompts = include_prompts @staticmethod def setup_once(): # type: () -> None huggingface_hub.inference._client.InferenceClient.text_generation = ( _wrap_text_generation( huggingface_hub.inference._client.InferenceClient.text_generation ) ) def _capture_exception(exc): # type: (Any) -> None event, hint = event_from_exception( exc, client_options=sentry_sdk.get_client().options, mechanism={"type": "huggingface_hub", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) def _wrap_text_generation(f): # type: (Callable[..., Any]) -> Callable[..., Any] @wraps(f) def new_text_generation(*args, **kwargs): # type: (*Any, **Any) -> Any integration = sentry_sdk.get_client().get_integration(HuggingfaceHubIntegration) if integration is None: return f(*args, **kwargs) if "prompt" in kwargs: prompt = kwargs["prompt"] elif len(args) >= 2: kwargs["prompt"] = args[1] prompt = kwargs["prompt"] args = (args[0],) + args[2:] else: # invalid call, let it return error return f(*args, **kwargs) model = kwargs.get("model") streaming = kwargs.get("stream") span = sentry_sdk.start_span( op=consts.OP.HUGGINGFACE_HUB_CHAT_COMPLETIONS_CREATE, name="Text Generation", origin=HuggingfaceHubIntegration.origin, ) span.__enter__() try: res = f(*args, **kwargs) except Exception as e: _capture_exception(e) span.__exit__(None, None, None) raise e from None with capture_internal_exceptions(): if should_send_default_pii() and integration.include_prompts: set_data_normalized(span, SPANDATA.AI_INPUT_MESSAGES, prompt) set_data_normalized(span, SPANDATA.AI_MODEL_ID, model) set_data_normalized(span, SPANDATA.AI_STREAMING, streaming) if isinstance(res, str): if should_send_default_pii() and integration.include_prompts: set_data_normalized( span, "ai.responses", [res], ) span.__exit__(None, None, None) return res if isinstance(res, TextGenerationOutput): if should_send_default_pii() and integration.include_prompts: set_data_normalized( span, "ai.responses", [res.generated_text], ) if res.details is not None and res.details.generated_tokens > 0: record_token_usage(span, total_tokens=res.details.generated_tokens) span.__exit__(None, None, None) return res if not isinstance(res, Iterable): # we only know how to deal with strings and iterables, ignore set_data_normalized(span, "unknown_response", True) span.__exit__(None, None, None) return res if kwargs.get("details", False): # res is Iterable[TextGenerationStreamOutput] def new_details_iterator(): # type: () -> Iterable[ChatCompletionStreamOutput] with capture_internal_exceptions(): tokens_used = 0 data_buf: list[str] = [] for x in res: if hasattr(x, "token") and hasattr(x.token, "text"): data_buf.append(x.token.text) if hasattr(x, "details") and hasattr( x.details, "generated_tokens" ): tokens_used = x.details.generated_tokens yield x if ( len(data_buf) > 0 and should_send_default_pii() and integration.include_prompts ): set_data_normalized( span, SPANDATA.AI_RESPONSES, "".join(data_buf) ) if tokens_used > 0: record_token_usage(span, total_tokens=tokens_used) span.__exit__(None, None, None) return new_details_iterator() else: # res is Iterable[str] def new_iterator(): # type: () -> Iterable[str] data_buf: list[str] = [] with capture_internal_exceptions(): for s in res: if isinstance(s, str): data_buf.append(s) yield s if ( len(data_buf) > 0 and should_send_default_pii() and integration.include_prompts ): set_data_normalized( span, SPANDATA.AI_RESPONSES, "".join(data_buf) ) span.__exit__(None, None, None) return new_iterator() return new_text_generation 
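# Usage sketch (hedged editor's addition; the DSN below is a placeholder).
# Example of keeping the text-generation spans while opting out of
# prompt/response capture via the integration's include_prompts switch:
#
#     import sentry_sdk
#     from sentry_sdk.integrations.huggingface_hub import HuggingfaceHubIntegration
#
#     sentry_sdk.init(
#         dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
#         traces_sample_rate=1.0,
#         send_default_pii=True,
#         integrations=[HuggingfaceHubIntegration(include_prompts=False)],
#     )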
sentry-python-2.18.0/sentry_sdk/integrations/langchain.py000066400000000000000000000424661471214654000236550ustar00rootroot00000000000000from collections import OrderedDict
from functools import wraps

import sentry_sdk
from sentry_sdk.ai.monitoring import set_ai_pipeline_name, record_token_usage
from sentry_sdk.consts import OP, SPANDATA
from sentry_sdk.ai.utils import set_data_normalized
from sentry_sdk.scope import should_send_default_pii
from sentry_sdk.tracing import Span
from sentry_sdk.integrations import DidNotEnable, Integration
from sentry_sdk.utils import logger, capture_internal_exceptions

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Any, List, Callable, Dict, Union, Optional
    from uuid import UUID

try:
    from langchain_core.messages import BaseMessage
    from langchain_core.outputs import LLMResult
    from langchain_core.callbacks import (
        manager,
        BaseCallbackHandler,
    )
    from langchain_core.agents import AgentAction, AgentFinish
except ImportError:
    raise DidNotEnable("langchain not installed")


DATA_FIELDS = {
    "temperature": SPANDATA.AI_TEMPERATURE,
    "top_p": SPANDATA.AI_TOP_P,
    "top_k": SPANDATA.AI_TOP_K,
    "function_call": SPANDATA.AI_FUNCTION_CALL,
    "tool_calls": SPANDATA.AI_TOOL_CALLS,
    "tools": SPANDATA.AI_TOOLS,
    "response_format": SPANDATA.AI_RESPONSE_FORMAT,
    "logit_bias": SPANDATA.AI_LOGIT_BIAS,
    "tags": SPANDATA.AI_TAGS,
}

# To avoid double collecting tokens, we do *not* measure
# token counts for models for which we have an explicit integration
NO_COLLECT_TOKEN_MODELS = [
    "openai-chat",
    "anthropic-chat",
    "cohere-chat",
    "huggingface_endpoint",
]


class LangchainIntegration(Integration):
    identifier = "langchain"
    origin = f"auto.ai.{identifier}"

    # The maximum number of spans (e.g., LLM calls) that can be processed at the same time.
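    # (Editor's note: the class attribute below is only the default; it can be
    # tuned per setup via LangchainIntegration(max_spans=...) in __init__ below,
    # and SentryLangchainCallback.gc_span_map() finishes and evicts the oldest
    # watched span once the limit is exceeded.)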
max_spans = 1024 def __init__( self, include_prompts=True, max_spans=1024, tiktoken_encoding_name=None ): # type: (LangchainIntegration, bool, int, Optional[str]) -> None self.include_prompts = include_prompts self.max_spans = max_spans self.tiktoken_encoding_name = tiktoken_encoding_name @staticmethod def setup_once(): # type: () -> None manager._configure = _wrap_configure(manager._configure) class WatchedSpan: span = None # type: Span num_completion_tokens = 0 # type: int num_prompt_tokens = 0 # type: int no_collect_tokens = False # type: bool children = [] # type: List[WatchedSpan] is_pipeline = False # type: bool def __init__(self, span): # type: (Span) -> None self.span = span class SentryLangchainCallback(BaseCallbackHandler): # type: ignore[misc] """Base callback handler that can be used to handle callbacks from langchain.""" span_map = OrderedDict() # type: OrderedDict[UUID, WatchedSpan] max_span_map_size = 0 def __init__(self, max_span_map_size, include_prompts, tiktoken_encoding_name=None): # type: (int, bool, Optional[str]) -> None self.max_span_map_size = max_span_map_size self.include_prompts = include_prompts self.tiktoken_encoding = None if tiktoken_encoding_name is not None: import tiktoken # type: ignore self.tiktoken_encoding = tiktoken.get_encoding(tiktoken_encoding_name) def count_tokens(self, s): # type: (str) -> int if self.tiktoken_encoding is not None: return len(self.tiktoken_encoding.encode_ordinary(s)) return 0 def gc_span_map(self): # type: () -> None while len(self.span_map) > self.max_span_map_size: run_id, watched_span = self.span_map.popitem(last=False) self._exit_span(watched_span, run_id) def _handle_error(self, run_id, error): # type: (UUID, Any) -> None if not run_id or run_id not in self.span_map: return span_data = self.span_map[run_id] if not span_data: return sentry_sdk.capture_exception(error, span_data.span.scope) span_data.span.__exit__(None, None, None) del self.span_map[run_id] def _normalize_langchain_message(self, message): # type: (BaseMessage) -> Any parsed = {"content": message.content, "role": message.type} parsed.update(message.additional_kwargs) return parsed def _create_span(self, run_id, parent_id, **kwargs): # type: (SentryLangchainCallback, UUID, Optional[Any], Any) -> WatchedSpan watched_span = None # type: Optional[WatchedSpan] if parent_id: parent_span = self.span_map.get(parent_id) # type: Optional[WatchedSpan] if parent_span: watched_span = WatchedSpan(parent_span.span.start_child(**kwargs)) parent_span.children.append(watched_span) if watched_span is None: watched_span = WatchedSpan(sentry_sdk.start_span(**kwargs)) if kwargs.get("op", "").startswith("ai.pipeline."): if kwargs.get("name"): set_ai_pipeline_name(kwargs.get("name")) watched_span.is_pipeline = True watched_span.span.__enter__() self.span_map[run_id] = watched_span self.gc_span_map() return watched_span def _exit_span(self, span_data, run_id): # type: (SentryLangchainCallback, WatchedSpan, UUID) -> None if span_data.is_pipeline: set_ai_pipeline_name(None) span_data.span.__exit__(None, None, None) del self.span_map[run_id] def on_llm_start( self, serialized, prompts, *, run_id, tags=None, parent_run_id=None, metadata=None, **kwargs, ): # type: (SentryLangchainCallback, Dict[str, Any], List[str], UUID, Optional[List[str]], Optional[UUID], Optional[Dict[str, Any]], Any) -> Any """Run when LLM starts running.""" with capture_internal_exceptions(): if not run_id: return all_params = kwargs.get("invocation_params", {}) all_params.update(serialized.get("kwargs", {})) 
watched_span = self._create_span(
                run_id,
                kwargs.get("parent_run_id"),
                op=OP.LANGCHAIN_RUN,
                name=kwargs.get("name") or "Langchain LLM call",
                origin=LangchainIntegration.origin,
            )
            span = watched_span.span
            if should_send_default_pii() and self.include_prompts:
                set_data_normalized(span, SPANDATA.AI_INPUT_MESSAGES, prompts)
            for k, v in DATA_FIELDS.items():
                if k in all_params:
                    set_data_normalized(span, v, all_params[k])

    def on_chat_model_start(self, serialized, messages, *, run_id, **kwargs):
        # type: (SentryLangchainCallback, Dict[str, Any], List[List[BaseMessage]], UUID, Any) -> Any
        """Run when Chat Model starts running."""
        with capture_internal_exceptions():
            if not run_id:
                return
            all_params = kwargs.get("invocation_params", {})
            all_params.update(serialized.get("kwargs", {}))
            watched_span = self._create_span(
                run_id,
                kwargs.get("parent_run_id"),
                op=OP.LANGCHAIN_CHAT_COMPLETIONS_CREATE,
                name=kwargs.get("name") or "Langchain Chat Model",
                origin=LangchainIntegration.origin,
            )
            span = watched_span.span
            model = all_params.get(
                "model", all_params.get("model_name", all_params.get("model_id"))
            )
            watched_span.no_collect_tokens = any(
                x in all_params.get("_type", "") for x in NO_COLLECT_TOKEN_MODELS
            )

            # Default to "" so a missing "_type" cannot raise a TypeError here,
            # mirroring the lookup a few lines above.
            if not model and "anthropic" in all_params.get("_type", ""):
                model = "claude-2"
            if model:
                span.set_data(SPANDATA.AI_MODEL_ID, model)
            if should_send_default_pii() and self.include_prompts:
                set_data_normalized(
                    span,
                    SPANDATA.AI_INPUT_MESSAGES,
                    [
                        [self._normalize_langchain_message(x) for x in list_]
                        for list_ in messages
                    ],
                )
            for k, v in DATA_FIELDS.items():
                if k in all_params:
                    set_data_normalized(span, v, all_params[k])
            if not watched_span.no_collect_tokens:
                for list_ in messages:
                    for message in list_:
                        self.span_map[run_id].num_prompt_tokens += self.count_tokens(
                            message.content
                        ) + self.count_tokens(message.type)

    def on_llm_new_token(self, token, *, run_id, **kwargs):
        # type: (SentryLangchainCallback, str, UUID, Any) -> Any
        """Run on new LLM token.
Only available when streaming is enabled.""" with capture_internal_exceptions(): if not run_id or run_id not in self.span_map: return span_data = self.span_map[run_id] if not span_data or span_data.no_collect_tokens: return span_data.num_completion_tokens += self.count_tokens(token) def on_llm_end(self, response, *, run_id, **kwargs): # type: (SentryLangchainCallback, LLMResult, UUID, Any) -> Any """Run when LLM ends running.""" with capture_internal_exceptions(): if not run_id: return token_usage = ( response.llm_output.get("token_usage") if response.llm_output else None ) span_data = self.span_map[run_id] if not span_data: return if should_send_default_pii() and self.include_prompts: set_data_normalized( span_data.span, SPANDATA.AI_RESPONSES, [[x.text for x in list_] for list_ in response.generations], ) if not span_data.no_collect_tokens: if token_usage: record_token_usage( span_data.span, token_usage.get("prompt_tokens"), token_usage.get("completion_tokens"), token_usage.get("total_tokens"), ) else: record_token_usage( span_data.span, span_data.num_prompt_tokens, span_data.num_completion_tokens, ) self._exit_span(span_data, run_id) def on_llm_error(self, error, *, run_id, **kwargs): # type: (SentryLangchainCallback, Union[Exception, KeyboardInterrupt], UUID, Any) -> Any """Run when LLM errors.""" with capture_internal_exceptions(): self._handle_error(run_id, error) def on_chain_start(self, serialized, inputs, *, run_id, **kwargs): # type: (SentryLangchainCallback, Dict[str, Any], Dict[str, Any], UUID, Any) -> Any """Run when chain starts running.""" with capture_internal_exceptions(): if not run_id: return watched_span = self._create_span( run_id, kwargs.get("parent_run_id"), op=( OP.LANGCHAIN_RUN if kwargs.get("parent_run_id") is not None else OP.LANGCHAIN_PIPELINE ), name=kwargs.get("name") or "Chain execution", origin=LangchainIntegration.origin, ) metadata = kwargs.get("metadata") if metadata: set_data_normalized(watched_span.span, SPANDATA.AI_METADATA, metadata) def on_chain_end(self, outputs, *, run_id, **kwargs): # type: (SentryLangchainCallback, Dict[str, Any], UUID, Any) -> Any """Run when chain ends running.""" with capture_internal_exceptions(): if not run_id or run_id not in self.span_map: return span_data = self.span_map[run_id] if not span_data: return self._exit_span(span_data, run_id) def on_chain_error(self, error, *, run_id, **kwargs): # type: (SentryLangchainCallback, Union[Exception, KeyboardInterrupt], UUID, Any) -> Any """Run when chain errors.""" self._handle_error(run_id, error) def on_agent_action(self, action, *, run_id, **kwargs): # type: (SentryLangchainCallback, AgentAction, UUID, Any) -> Any with capture_internal_exceptions(): if not run_id: return watched_span = self._create_span( run_id, kwargs.get("parent_run_id"), op=OP.LANGCHAIN_AGENT, name=action.tool or "AI tool usage", origin=LangchainIntegration.origin, ) if action.tool_input and should_send_default_pii() and self.include_prompts: set_data_normalized( watched_span.span, SPANDATA.AI_INPUT_MESSAGES, action.tool_input ) def on_agent_finish(self, finish, *, run_id, **kwargs): # type: (SentryLangchainCallback, AgentFinish, UUID, Any) -> Any with capture_internal_exceptions(): if not run_id: return span_data = self.span_map[run_id] if not span_data: return if should_send_default_pii() and self.include_prompts: set_data_normalized( span_data.span, SPANDATA.AI_RESPONSES, finish.return_values.items() ) self._exit_span(span_data, run_id) def on_tool_start(self, serialized, input_str, *, run_id, **kwargs): # 
type: (SentryLangchainCallback, Dict[str, Any], str, UUID, Any) -> Any
        """Run when tool starts running."""
        with capture_internal_exceptions():
            if not run_id:
                return
            watched_span = self._create_span(
                run_id,
                kwargs.get("parent_run_id"),
                op=OP.LANGCHAIN_TOOL,
                name=serialized.get("name") or kwargs.get("name") or "AI tool usage",
                origin=LangchainIntegration.origin,
            )
            if should_send_default_pii() and self.include_prompts:
                set_data_normalized(
                    watched_span.span,
                    SPANDATA.AI_INPUT_MESSAGES,
                    kwargs.get("inputs", [input_str]),
                )
            if kwargs.get("metadata"):
                set_data_normalized(
                    watched_span.span, SPANDATA.AI_METADATA, kwargs.get("metadata")
                )

    def on_tool_end(self, output, *, run_id, **kwargs):
        # type: (SentryLangchainCallback, str, UUID, Any) -> Any
        """Run when tool ends running."""
        with capture_internal_exceptions():
            if not run_id or run_id not in self.span_map:
                return

            span_data = self.span_map[run_id]
            if not span_data:
                return
            if should_send_default_pii() and self.include_prompts:
                set_data_normalized(span_data.span, SPANDATA.AI_RESPONSES, output)
            self._exit_span(span_data, run_id)

    def on_tool_error(self, error, *args, run_id, **kwargs):
        # type: (SentryLangchainCallback, Union[Exception, KeyboardInterrupt], UUID, Any) -> Any
        """Run when tool errors."""
        self._handle_error(run_id, error)


def _wrap_configure(f):
    # type: (Callable[..., Any]) -> Callable[..., Any]

    @wraps(f)
    def new_configure(*args, **kwargs):
        # type: (Any, Any) -> Any

        integration = sentry_sdk.get_client().get_integration(LangchainIntegration)
        if integration is None:
            return f(*args, **kwargs)

        with capture_internal_exceptions():
            new_callbacks = []  # type: List[BaseCallbackHandler]
            if "local_callbacks" in kwargs:
                existing_callbacks = kwargs["local_callbacks"]
                kwargs["local_callbacks"] = new_callbacks
            elif len(args) > 2:
                existing_callbacks = args[2]
                args = (
                    args[0],
                    args[1],
                    new_callbacks,
                ) + args[3:]
            else:
                existing_callbacks = []

            if existing_callbacks:
                if isinstance(existing_callbacks, list):
                    for cb in existing_callbacks:
                        new_callbacks.append(cb)
                elif isinstance(existing_callbacks, BaseCallbackHandler):
                    new_callbacks.append(existing_callbacks)
                else:
                    logger.debug("Unknown callback type: %s", existing_callbacks)

            already_added = False
            for callback in new_callbacks:
                if isinstance(callback, SentryLangchainCallback):
                    already_added = True

            if not already_added:
                new_callbacks.append(
                    SentryLangchainCallback(
                        integration.max_spans,
                        integration.include_prompts,
                        integration.tiktoken_encoding_name,
                    )
                )
        return f(*args, **kwargs)

    return new_configure
sentry-python-2.18.0/sentry_sdk/integrations/launchdarkly.py000066400000000000000000000040661471214654000244050ustar00rootroot00000000000000from typing import TYPE_CHECKING

import sentry_sdk
from sentry_sdk.integrations import DidNotEnable, Integration
from sentry_sdk.flag_utils import flag_error_processor

try:
    import ldclient
    from ldclient.hook import Hook, Metadata

    if TYPE_CHECKING:
        from ldclient import LDClient
        from ldclient.hook import EvaluationSeriesContext
        from ldclient.evaluation import EvaluationDetail

        from typing import Any
except ImportError:
    raise DidNotEnable("LaunchDarkly is not installed")


class LaunchDarklyIntegration(Integration):
    identifier = "launchdarkly"

    def __init__(self, ld_client=None):
        # type: (LDClient | None) -> None
        """
        :param ld_client: An initialized LDClient instance. If a client is not provided, this
            integration will attempt to use the shared global instance.
""" try: client = ld_client or ldclient.get() except Exception as exc: raise DidNotEnable("Error getting LaunchDarkly client. " + repr(exc)) if not client.is_initialized(): raise DidNotEnable("LaunchDarkly client is not initialized.") # Register the flag collection hook with the LD client. client.add_hook(LaunchDarklyHook()) @staticmethod def setup_once(): # type: () -> None scope = sentry_sdk.get_current_scope() scope.add_error_processor(flag_error_processor) class LaunchDarklyHook(Hook): @property def metadata(self): # type: () -> Metadata return Metadata(name="sentry-feature-flag-recorder") def after_evaluation(self, series_context, data, detail): # type: (EvaluationSeriesContext, dict[Any, Any], EvaluationDetail) -> dict[Any, Any] if isinstance(detail.value, bool): flags = sentry_sdk.get_current_scope().flags flags.set(series_context.key, detail.value) return data def before_evaluation(self, series_context, data): # type: (EvaluationSeriesContext, dict[Any, Any]) -> dict[Any, Any] return data # No-op. sentry-python-2.18.0/sentry_sdk/integrations/litestar.py000066400000000000000000000252641471214654000235560ustar00rootroot00000000000000import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE from sentry_sdk.utils import ( ensure_integration_enabled, event_from_exception, transaction_from_function, ) try: from litestar import Request, Litestar # type: ignore from litestar.handlers.base import BaseRouteHandler # type: ignore from litestar.middleware import DefineMiddleware # type: ignore from litestar.routes.http import HTTPRoute # type: ignore from litestar.data_extractors import ConnectionDataExtractor # type: ignore except ImportError: raise DidNotEnable("Litestar is not installed") from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any, Optional, Union from litestar.types.asgi_types import ASGIApp # type: ignore from litestar.types import ( # type: ignore HTTPReceiveMessage, HTTPScope, Message, Middleware, Receive, Scope as LitestarScope, Send, WebSocketReceiveMessage, ) from litestar.middleware import MiddlewareProtocol from sentry_sdk._types import Event, Hint _DEFAULT_TRANSACTION_NAME = "generic Litestar request" class LitestarIntegration(Integration): identifier = "litestar" origin = f"auto.http.{identifier}" @staticmethod def setup_once(): # type: () -> None patch_app_init() patch_middlewares() patch_http_route_handle() # The following line follows the pattern found in other integrations such as `DjangoIntegration.setup_once`. # The Litestar `ExceptionHandlerMiddleware.__call__` catches exceptions and does the following # (among other things): # 1. Logs them, some at least (such as 500s) as errors # 2. Calls after_exception hooks # The `LitestarIntegration`` provides an after_exception hook (see `patch_app_init` below) to create a Sentry event # from an exception, which ends up being called during step 2 above. However, the Sentry `LoggingIntegration` will # by default create a Sentry event from error logs made in step 1 if we do not prevent it from doing so. 
ignore_logger("litestar") class SentryLitestarASGIMiddleware(SentryAsgiMiddleware): def __init__(self, app, span_origin=LitestarIntegration.origin): # type: (ASGIApp, str) -> None super().__init__( app=app, unsafe_context_data=False, transaction_style="endpoint", mechanism_type="asgi", span_origin=span_origin, ) def patch_app_init(): # type: () -> None """ Replaces the Litestar class's `__init__` function in order to inject `after_exception` handlers and set the `SentryLitestarASGIMiddleware` as the outmost middleware in the stack. See: - https://docs.litestar.dev/2/usage/applications.html#after-exception - https://docs.litestar.dev/2/usage/middleware/using-middleware.html """ old__init__ = Litestar.__init__ @ensure_integration_enabled(LitestarIntegration, old__init__) def injection_wrapper(self, *args, **kwargs): # type: (Litestar, *Any, **Any) -> None kwargs["after_exception"] = [ exception_handler, *(kwargs.get("after_exception") or []), ] SentryLitestarASGIMiddleware.__call__ = SentryLitestarASGIMiddleware._run_asgi3 # type: ignore middleware = kwargs.get("middleware") or [] kwargs["middleware"] = [SentryLitestarASGIMiddleware, *middleware] old__init__(self, *args, **kwargs) Litestar.__init__ = injection_wrapper def patch_middlewares(): # type: () -> None old_resolve_middleware_stack = BaseRouteHandler.resolve_middleware @ensure_integration_enabled(LitestarIntegration, old_resolve_middleware_stack) def resolve_middleware_wrapper(self): # type: (BaseRouteHandler) -> list[Middleware] return [ enable_span_for_middleware(middleware) for middleware in old_resolve_middleware_stack(self) ] BaseRouteHandler.resolve_middleware = resolve_middleware_wrapper def enable_span_for_middleware(middleware): # type: (Middleware) -> Middleware if ( not hasattr(middleware, "__call__") # noqa: B004 or middleware is SentryLitestarASGIMiddleware ): return middleware if isinstance(middleware, DefineMiddleware): old_call = middleware.middleware.__call__ # type: ASGIApp else: old_call = middleware.__call__ async def _create_span_call(self, scope, receive, send): # type: (MiddlewareProtocol, LitestarScope, Receive, Send) -> None if sentry_sdk.get_client().get_integration(LitestarIntegration) is None: return await old_call(self, scope, receive, send) middleware_name = self.__class__.__name__ with sentry_sdk.start_span( op=OP.MIDDLEWARE_LITESTAR, name=middleware_name, origin=LitestarIntegration.origin, ) as middleware_span: middleware_span.set_tag("litestar.middleware_name", middleware_name) # Creating spans for the "receive" callback async def _sentry_receive(*args, **kwargs): # type: (*Any, **Any) -> Union[HTTPReceiveMessage, WebSocketReceiveMessage] if sentry_sdk.get_client().get_integration(LitestarIntegration) is None: return await receive(*args, **kwargs) with sentry_sdk.start_span( op=OP.MIDDLEWARE_LITESTAR_RECEIVE, name=getattr(receive, "__qualname__", str(receive)), origin=LitestarIntegration.origin, ) as span: span.set_tag("litestar.middleware_name", middleware_name) return await receive(*args, **kwargs) receive_name = getattr(receive, "__name__", str(receive)) receive_patched = receive_name == "_sentry_receive" new_receive = _sentry_receive if not receive_patched else receive # Creating spans for the "send" callback async def _sentry_send(message): # type: (Message) -> None if sentry_sdk.get_client().get_integration(LitestarIntegration) is None: return await send(message) with sentry_sdk.start_span( op=OP.MIDDLEWARE_LITESTAR_SEND, name=getattr(send, "__qualname__", str(send)), 
origin=LitestarIntegration.origin, ) as span: span.set_tag("litestar.middleware_name", middleware_name) return await send(message) send_name = getattr(send, "__name__", str(send)) send_patched = send_name == "_sentry_send" new_send = _sentry_send if not send_patched else send return await old_call(self, scope, new_receive, new_send) not_yet_patched = old_call.__name__ not in ["_create_span_call"] if not_yet_patched: if isinstance(middleware, DefineMiddleware): middleware.middleware.__call__ = _create_span_call else: middleware.__call__ = _create_span_call return middleware def patch_http_route_handle(): # type: () -> None old_handle = HTTPRoute.handle async def handle_wrapper(self, scope, receive, send): # type: (HTTPRoute, HTTPScope, Receive, Send) -> None if sentry_sdk.get_client().get_integration(LitestarIntegration) is None: return await old_handle(self, scope, receive, send) sentry_scope = sentry_sdk.get_isolation_scope() request = scope["app"].request_class( scope=scope, receive=receive, send=send ) # type: Request[Any, Any] extracted_request_data = ConnectionDataExtractor( parse_body=True, parse_query=True )(request) body = extracted_request_data.pop("body") request_data = await body def event_processor(event, _): # type: (Event, Hint) -> Event route_handler = scope.get("route_handler") request_info = event.get("request", {}) request_info["content_length"] = len(scope.get("_body", b"")) if should_send_default_pii(): request_info["cookies"] = extracted_request_data["cookies"] if request_data is not None: request_info["data"] = request_data func = None if route_handler.name is not None: tx_name = route_handler.name # Accounts for use of type `Ref` in earlier versions of litestar without the need to reference it as a type elif hasattr(route_handler.fn, "value"): func = route_handler.fn.value else: func = route_handler.fn if func is not None: tx_name = transaction_from_function(func) tx_info = {"source": SOURCE_FOR_STYLE["endpoint"]} if not tx_name: tx_name = _DEFAULT_TRANSACTION_NAME tx_info = {"source": TRANSACTION_SOURCE_ROUTE} event.update( { "request": request_info, "transaction": tx_name, "transaction_info": tx_info, } ) return event sentry_scope._name = LitestarIntegration.identifier sentry_scope.add_event_processor(event_processor) return await old_handle(self, scope, receive, send) HTTPRoute.handle = handle_wrapper def retrieve_user_from_scope(scope): # type: (LitestarScope) -> Optional[dict[str, Any]] scope_user = scope.get("user") if isinstance(scope_user, dict): return scope_user if hasattr(scope_user, "asdict"): # dataclasses return scope_user.asdict() return None @ensure_integration_enabled(LitestarIntegration) def exception_handler(exc, scope): # type: (Exception, LitestarScope) -> None user_info = None # type: Optional[dict[str, Any]] if should_send_default_pii(): user_info = retrieve_user_from_scope(scope) if user_info and isinstance(user_info, dict): sentry_scope = sentry_sdk.get_isolation_scope() sentry_scope.set_user(user_info) event, hint = event_from_exception( exc, client_options=sentry_sdk.get_client().options, mechanism={"type": LitestarIntegration.identifier, "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) sentry-python-2.18.0/sentry_sdk/integrations/logging.py000066400000000000000000000225461471214654000233550ustar00rootroot00000000000000import logging from datetime import datetime, timezone from fnmatch import fnmatch import sentry_sdk from sentry_sdk.utils import ( to_string, event_from_exception, current_stacktrace, 
capture_internal_exceptions,
)
from sentry_sdk.integrations import Integration

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from collections.abc import MutableMapping
    from logging import LogRecord
    from typing import Any
    from typing import Dict
    from typing import Optional

DEFAULT_LEVEL = logging.INFO
DEFAULT_EVENT_LEVEL = logging.ERROR
LOGGING_TO_EVENT_LEVEL = {
    logging.NOTSET: "notset",
    logging.DEBUG: "debug",
    logging.INFO: "info",
    logging.WARN: "warning",  # WARN is the same as WARNING
    logging.WARNING: "warning",
    logging.ERROR: "error",
    logging.FATAL: "fatal",
    logging.CRITICAL: "fatal",  # CRITICAL is the same as FATAL
}

# Capturing events from those loggers causes recursion errors. We cannot allow
# the user to unconditionally create events from those loggers under any
# circumstances.
#
# Note: Ignoring by logger name here is better than mucking with thread-locals.
# We do not necessarily know whether thread-locals work 100% correctly in the user's environment.
_IGNORED_LOGGERS = set(
    ["sentry_sdk.errors", "urllib3.connectionpool", "urllib3.connection"]
)


def ignore_logger(
    name,  # type: str
):
    # type: (...) -> None
    """This disables recording (both in breadcrumbs and as events) calls to
    a logger of a specific name.  Among other uses, many of our integrations
    use this to prevent their actions being recorded as breadcrumbs. Exposed
    to users as a way to quiet spammy loggers.

    :param name: The name of the logger to ignore (same string you would pass to ``logging.getLogger``).
    """
    _IGNORED_LOGGERS.add(name)


class LoggingIntegration(Integration):
    identifier = "logging"

    def __init__(self, level=DEFAULT_LEVEL, event_level=DEFAULT_EVENT_LEVEL):
        # type: (Optional[int], Optional[int]) -> None
        self._handler = None
        self._breadcrumb_handler = None

        if level is not None:
            self._breadcrumb_handler = BreadcrumbHandler(level=level)

        if event_level is not None:
            self._handler = EventHandler(level=event_level)

    def _handle_record(self, record):
        # type: (LogRecord) -> None
        if self._handler is not None and record.levelno >= self._handler.level:
            self._handler.handle(record)

        if (
            self._breadcrumb_handler is not None
            and record.levelno >= self._breadcrumb_handler.level
        ):
            self._breadcrumb_handler.handle(record)

    @staticmethod
    def setup_once():
        # type: () -> None
        old_callhandlers = logging.Logger.callHandlers

        def sentry_patched_callhandlers(self, record):
            # type: (Any, LogRecord) -> Any
            # keeping a local reference because the
            # global might be discarded on shutdown
            ignored_loggers = _IGNORED_LOGGERS

            try:
                return old_callhandlers(self, record)
            finally:
                # This check is done twice, once also here before we even get
                # the integration. Otherwise we have a high chance of getting
                # into a recursion error when the integration is resolved
                # (this also is slower).
if ignored_loggers is not None and record.name not in ignored_loggers: integration = sentry_sdk.get_client().get_integration( LoggingIntegration ) if integration is not None: integration._handle_record(record) logging.Logger.callHandlers = sentry_patched_callhandlers # type: ignore class _BaseHandler(logging.Handler): COMMON_RECORD_ATTRS = frozenset( ( "args", "created", "exc_info", "exc_text", "filename", "funcName", "levelname", "levelno", "linenno", "lineno", "message", "module", "msecs", "msg", "name", "pathname", "process", "processName", "relativeCreated", "stack", "tags", "taskName", "thread", "threadName", "stack_info", ) ) def _can_record(self, record): # type: (LogRecord) -> bool """Prevents ignored loggers from recording""" for logger in _IGNORED_LOGGERS: if fnmatch(record.name, logger): return False return True def _logging_to_event_level(self, record): # type: (LogRecord) -> str return LOGGING_TO_EVENT_LEVEL.get( record.levelno, record.levelname.lower() if record.levelname else "" ) def _extra_from_record(self, record): # type: (LogRecord) -> MutableMapping[str, object] return { k: v for k, v in vars(record).items() if k not in self.COMMON_RECORD_ATTRS and (not isinstance(k, str) or not k.startswith("_")) } class EventHandler(_BaseHandler): """ A logging handler that emits Sentry events for each log record Note that you do not have to use this class if the logging integration is enabled, which it is by default. """ def emit(self, record): # type: (LogRecord) -> Any with capture_internal_exceptions(): self.format(record) return self._emit(record) def _emit(self, record): # type: (LogRecord) -> None if not self._can_record(record): return client = sentry_sdk.get_client() if not client.is_active(): return client_options = client.options # exc_info might be None or (None, None, None) # # exc_info may also be any falsy value due to Python stdlib being # liberal with what it receives and Celery's billiard being "liberal" # with what it sends. 
        # See https://github.com/getsentry/sentry-python/issues/904
        if record.exc_info and record.exc_info[0] is not None:
            event, hint = event_from_exception(
                record.exc_info,
                client_options=client_options,
                mechanism={"type": "logging", "handled": True},
            )
        elif record.exc_info and record.exc_info[0] is None:
            event = {}
            hint = {}
            with capture_internal_exceptions():
                event["threads"] = {
                    "values": [
                        {
                            "stacktrace": current_stacktrace(
                                include_local_variables=client_options[
                                    "include_local_variables"
                                ],
                                max_value_length=client_options["max_value_length"],
                            ),
                            "crashed": False,
                            "current": True,
                        }
                    ]
                }
        else:
            event = {}
            hint = {}

        hint["log_record"] = record

        level = self._logging_to_event_level(record)
        if level in {"debug", "info", "warning", "error", "critical", "fatal"}:
            event["level"] = level  # type: ignore[typeddict-item]
        event["logger"] = record.name

        # Log records from the `warnings` module as separate issues
        record_captured_from_warnings_module = (
            record.name == "py.warnings" and record.msg == "%s"
        )
        if record_captured_from_warnings_module:
            # use the actual message and not "%s" as the message
            # this prevents grouping all warnings under one "%s" issue
            msg = record.args[0]  # type: ignore

            event["logentry"] = {
                "message": msg,
                "params": (),
            }
        else:
            event["logentry"] = {
                "message": to_string(record.msg),
                "params": record.args,
            }

        event["extra"] = self._extra_from_record(record)

        sentry_sdk.capture_event(event, hint=hint)


# Legacy name
SentryHandler = EventHandler


class BreadcrumbHandler(_BaseHandler):
    """
    A logging handler that records breadcrumbs for each log record.

    Note that you do not have to use this class if the logging
    integration is enabled, which it is by default.
    """

    def emit(self, record):
        # type: (LogRecord) -> Any
        with capture_internal_exceptions():
            self.format(record)
            return self._emit(record)

    def _emit(self, record):
        # type: (LogRecord) -> None
        if not self._can_record(record):
            return

        sentry_sdk.add_breadcrumb(
            self._breadcrumb_from_record(record), hint={"log_record": record}
        )

    def _breadcrumb_from_record(self, record):
        # type: (LogRecord) -> Dict[str, Any]
        return {
            "type": "log",
            "level": self._logging_to_event_level(record),
            "category": record.name,
            "message": record.message,
            "timestamp": datetime.fromtimestamp(record.created, timezone.utc),
            "data": self._extra_from_record(record),
        }
sentry-python-2.18.0/sentry_sdk/integrations/loguru.py000066400000000000000000000056711471214654000232340ustar00rootroot00000000000000import enum

from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.integrations.logging import (
    BreadcrumbHandler,
    EventHandler,
    _BaseHandler,
)

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from logging import LogRecord
    from typing import Optional, Tuple

try:
    import loguru
    from loguru import logger
    from loguru._defaults import LOGURU_FORMAT as DEFAULT_FORMAT
except ImportError:
    raise DidNotEnable("LOGURU is not installed")


class LoggingLevels(enum.IntEnum):
    TRACE = 5
    DEBUG = 10
    INFO = 20
    SUCCESS = 25
    WARNING = 30
    ERROR = 40
    CRITICAL = 50


DEFAULT_LEVEL = LoggingLevels.INFO.value
DEFAULT_EVENT_LEVEL = LoggingLevels.ERROR.value
# We need to save the handlers to be able to remove them later
# in tests (they call `LoguruIntegration.__init__` multiple times,
# and we can't use `setup_once` because it's called before
# we get the configuration).
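#
# Usage sketch (illustrative, not part of this module; the DSN is a
# placeholder):
#
#     import sentry_sdk
#     from sentry_sdk.integrations.loguru import LoguruIntegration, LoggingLevels
#
#     sentry_sdk.init(
#         dsn="https://<key>@<org>.ingest.sentry.io/<project>",
#         integrations=[
#             LoguruIntegration(
#                 level=LoggingLevels.INFO.value,        # breadcrumbs
#                 event_level=LoggingLevels.ERROR.value, # events
#             )
#         ],
#     )
#
#     from loguru import logger
#     logger.error("this becomes a Sentry event")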
_ADDED_HANDLERS = (None, None) # type: Tuple[Optional[int], Optional[int]] class LoguruIntegration(Integration): identifier = "loguru" def __init__( self, level=DEFAULT_LEVEL, event_level=DEFAULT_EVENT_LEVEL, breadcrumb_format=DEFAULT_FORMAT, event_format=DEFAULT_FORMAT, ): # type: (Optional[int], Optional[int], str | loguru.FormatFunction, str | loguru.FormatFunction) -> None global _ADDED_HANDLERS breadcrumb_handler, event_handler = _ADDED_HANDLERS if breadcrumb_handler is not None: logger.remove(breadcrumb_handler) breadcrumb_handler = None if event_handler is not None: logger.remove(event_handler) event_handler = None if level is not None: breadcrumb_handler = logger.add( LoguruBreadcrumbHandler(level=level), level=level, format=breadcrumb_format, ) if event_level is not None: event_handler = logger.add( LoguruEventHandler(level=event_level), level=event_level, format=event_format, ) _ADDED_HANDLERS = (breadcrumb_handler, event_handler) @staticmethod def setup_once(): # type: () -> None pass # we do everything in __init__ class _LoguruBaseHandler(_BaseHandler): def _logging_to_event_level(self, record): # type: (LogRecord) -> str try: return LoggingLevels(record.levelno).name.lower() except ValueError: return record.levelname.lower() if record.levelname else "" class LoguruEventHandler(_LoguruBaseHandler, EventHandler): """Modified version of :class:`sentry_sdk.integrations.logging.EventHandler` to use loguru's level names.""" class LoguruBreadcrumbHandler(_LoguruBaseHandler, BreadcrumbHandler): """Modified version of :class:`sentry_sdk.integrations.logging.BreadcrumbHandler` to use loguru's level names.""" sentry-python-2.18.0/sentry_sdk/integrations/modules.py000066400000000000000000000014641471214654000233730ustar00rootroot00000000000000import sentry_sdk from sentry_sdk.integrations import Integration from sentry_sdk.scope import add_global_event_processor from sentry_sdk.utils import _get_installed_modules from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from sentry_sdk._types import Event class ModulesIntegration(Integration): identifier = "modules" @staticmethod def setup_once(): # type: () -> None @add_global_event_processor def processor(event, hint): # type: (Event, Any) -> Event if event.get("type") == "transaction": return event if sentry_sdk.get_client().get_integration(ModulesIntegration) is None: return event event["modules"] = _get_installed_modules() return event sentry-python-2.18.0/sentry_sdk/integrations/openai.py000066400000000000000000000363141471214654000232000ustar00rootroot00000000000000from functools import wraps import sentry_sdk from sentry_sdk import consts from sentry_sdk.ai.monitoring import record_token_usage from sentry_sdk.ai.utils import set_data_normalized from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.scope import should_send_default_pii from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any, Iterable, List, Optional, Callable, AsyncIterator, Iterator from sentry_sdk.tracing import Span try: from openai.resources.chat.completions import Completions, AsyncCompletions from openai.resources import Embeddings, AsyncEmbeddings if TYPE_CHECKING: from openai.types.chat import ChatCompletionMessageParam, ChatCompletionChunk except ImportError: raise DidNotEnable("OpenAI not installed") class OpenAIIntegration(Integration): identifier = "openai" origin = 
f"auto.ai.{identifier}" def __init__(self, include_prompts=True, tiktoken_encoding_name=None): # type: (OpenAIIntegration, bool, Optional[str]) -> None self.include_prompts = include_prompts self.tiktoken_encoding = None if tiktoken_encoding_name is not None: import tiktoken # type: ignore self.tiktoken_encoding = tiktoken.get_encoding(tiktoken_encoding_name) @staticmethod def setup_once(): # type: () -> None Completions.create = _wrap_chat_completion_create(Completions.create) Embeddings.create = _wrap_embeddings_create(Embeddings.create) AsyncCompletions.create = _wrap_async_chat_completion_create( AsyncCompletions.create ) AsyncEmbeddings.create = _wrap_async_embeddings_create(AsyncEmbeddings.create) def count_tokens(self, s): # type: (OpenAIIntegration, str) -> int if self.tiktoken_encoding is not None: return len(self.tiktoken_encoding.encode_ordinary(s)) return 0 def _capture_exception(exc): # type: (Any) -> None event, hint = event_from_exception( exc, client_options=sentry_sdk.get_client().options, mechanism={"type": "openai", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) def _calculate_chat_completion_usage( messages, response, span, streaming_message_responses, count_tokens ): # type: (Iterable[ChatCompletionMessageParam], Any, Span, Optional[List[str]], Callable[..., Any]) -> None completion_tokens = 0 # type: Optional[int] prompt_tokens = 0 # type: Optional[int] total_tokens = 0 # type: Optional[int] if hasattr(response, "usage"): if hasattr(response.usage, "completion_tokens") and isinstance( response.usage.completion_tokens, int ): completion_tokens = response.usage.completion_tokens if hasattr(response.usage, "prompt_tokens") and isinstance( response.usage.prompt_tokens, int ): prompt_tokens = response.usage.prompt_tokens if hasattr(response.usage, "total_tokens") and isinstance( response.usage.total_tokens, int ): total_tokens = response.usage.total_tokens if prompt_tokens == 0: for message in messages: if "content" in message: prompt_tokens += count_tokens(message["content"]) if completion_tokens == 0: if streaming_message_responses is not None: for message in streaming_message_responses: completion_tokens += count_tokens(message) elif hasattr(response, "choices"): for choice in response.choices: if hasattr(choice, "message"): completion_tokens += count_tokens(choice.message) if prompt_tokens == 0: prompt_tokens = None if completion_tokens == 0: completion_tokens = None if total_tokens == 0: total_tokens = None record_token_usage(span, prompt_tokens, completion_tokens, total_tokens) def _new_chat_completion_common(f, *args, **kwargs): # type: (Any, *Any, **Any) -> Any integration = sentry_sdk.get_client().get_integration(OpenAIIntegration) if integration is None: return f(*args, **kwargs) if "messages" not in kwargs: # invalid call (in all versions of openai), let it return error return f(*args, **kwargs) try: iter(kwargs["messages"]) except TypeError: # invalid call (in all versions), messages must be iterable return f(*args, **kwargs) kwargs["messages"] = list(kwargs["messages"]) messages = kwargs["messages"] model = kwargs.get("model") streaming = kwargs.get("stream") span = sentry_sdk.start_span( op=consts.OP.OPENAI_CHAT_COMPLETIONS_CREATE, description="Chat Completion", origin=OpenAIIntegration.origin, ) span.__enter__() res = yield f, args, kwargs with capture_internal_exceptions(): if should_send_default_pii() and integration.include_prompts: set_data_normalized(span, SPANDATA.AI_INPUT_MESSAGES, messages) set_data_normalized(span, 
SPANDATA.AI_MODEL_ID, model)
        set_data_normalized(span, SPANDATA.AI_STREAMING, streaming)

        if hasattr(res, "choices"):
            if should_send_default_pii() and integration.include_prompts:
                set_data_normalized(
                    span,
                    "ai.responses",
                    list(map(lambda x: x.message, res.choices)),
                )
            _calculate_chat_completion_usage(
                messages, res, span, None, integration.count_tokens
            )
            span.__exit__(None, None, None)
        elif hasattr(res, "_iterator"):
            data_buf: list[list[str]] = []  # one for each choice

            old_iterator = res._iterator

            def new_iterator():
                # type: () -> Iterator[ChatCompletionChunk]
                with capture_internal_exceptions():
                    for x in old_iterator:
                        if hasattr(x, "choices"):
                            choice_index = 0
                            for choice in x.choices:
                                if hasattr(choice, "delta") and hasattr(
                                    choice.delta, "content"
                                ):
                                    content = choice.delta.content
                                    if len(data_buf) <= choice_index:
                                        data_buf.append([])
                                    data_buf[choice_index].append(content or "")
                                choice_index += 1
                        yield x
                    if len(data_buf) > 0:
                        all_responses = list(
                            map(lambda chunk: "".join(chunk), data_buf)
                        )
                        if should_send_default_pii() and integration.include_prompts:
                            set_data_normalized(
                                span, SPANDATA.AI_RESPONSES, all_responses
                            )
                        _calculate_chat_completion_usage(
                            messages,
                            res,
                            span,
                            all_responses,
                            integration.count_tokens,
                        )
                span.__exit__(None, None, None)

            async def new_iterator_async():
                # type: () -> AsyncIterator[ChatCompletionChunk]
                with capture_internal_exceptions():
                    async for x in old_iterator:
                        if hasattr(x, "choices"):
                            choice_index = 0
                            for choice in x.choices:
                                if hasattr(choice, "delta") and hasattr(
                                    choice.delta, "content"
                                ):
                                    content = choice.delta.content
                                    if len(data_buf) <= choice_index:
                                        data_buf.append([])
                                    data_buf[choice_index].append(content or "")
                                choice_index += 1
                        yield x
                    if len(data_buf) > 0:
                        all_responses = list(
                            map(lambda chunk: "".join(chunk), data_buf)
                        )
                        if should_send_default_pii() and integration.include_prompts:
                            set_data_normalized(
                                span, SPANDATA.AI_RESPONSES, all_responses
                            )
                        _calculate_chat_completion_usage(
                            messages,
                            res,
                            span,
                            all_responses,
                            integration.count_tokens,
                        )
                span.__exit__(None, None, None)

            if str(type(res._iterator)) == "<class 'async_generator'>":
                res._iterator = new_iterator_async()
            else:
                res._iterator = new_iterator()
        else:
            set_data_normalized(span, "unknown_response", True)
            span.__exit__(None, None, None)

        return res


def _wrap_chat_completion_create(f):
    # type: (Callable[..., Any]) -> Callable[..., Any]
    def _execute_sync(f, *args, **kwargs):
        # type: (Any, *Any, **Any) -> Any
        gen = _new_chat_completion_common(f, *args, **kwargs)

        try:
            f, args, kwargs = next(gen)
        except StopIteration as e:
            return e.value

        try:
            try:
                result = f(*args, **kwargs)
            except Exception as e:
                _capture_exception(e)
                raise e from None

            return gen.send(result)
        except StopIteration as e:
            return e.value

    @wraps(f)
    def _sentry_patched_create_sync(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        integration = sentry_sdk.get_client().get_integration(OpenAIIntegration)
        if integration is None or "messages" not in kwargs:
            # no "messages" means invalid call (in all versions of openai), let it return error
            return f(*args, **kwargs)

        return _execute_sync(f, *args, **kwargs)

    return _sentry_patched_create_sync


def _wrap_async_chat_completion_create(f):
    # type: (Callable[..., Any]) -> Callable[..., Any]
    async def _execute_async(f, *args, **kwargs):
        # type: (Any, *Any, **Any) -> Any
        gen = _new_chat_completion_common(f, *args, **kwargs)

        try:
            f, args, kwargs = next(gen)
        except StopIteration as e:
            return await e.value

        try:
            try:
                result = await f(*args, **kwargs)
            except Exception as e:
                _capture_exception(e)
                raise e from None

            return
gen.send(result) except StopIteration as e: return e.value @wraps(f) async def _sentry_patched_create_async(*args, **kwargs): # type: (*Any, **Any) -> Any integration = sentry_sdk.get_client().get_integration(OpenAIIntegration) if integration is None or "messages" not in kwargs: # no "messages" means invalid call (in all versions of openai), let it return error return await f(*args, **kwargs) return await _execute_async(f, *args, **kwargs) return _sentry_patched_create_async def _new_embeddings_create_common(f, *args, **kwargs): # type: (Any, *Any, **Any) -> Any integration = sentry_sdk.get_client().get_integration(OpenAIIntegration) if integration is None: return f(*args, **kwargs) with sentry_sdk.start_span( op=consts.OP.OPENAI_EMBEDDINGS_CREATE, description="OpenAI Embedding Creation", origin=OpenAIIntegration.origin, ) as span: if "input" in kwargs and ( should_send_default_pii() and integration.include_prompts ): if isinstance(kwargs["input"], str): set_data_normalized(span, "ai.input_messages", [kwargs["input"]]) elif ( isinstance(kwargs["input"], list) and len(kwargs["input"]) > 0 and isinstance(kwargs["input"][0], str) ): set_data_normalized(span, "ai.input_messages", kwargs["input"]) if "model" in kwargs: set_data_normalized(span, "ai.model_id", kwargs["model"]) response = yield f, args, kwargs prompt_tokens = 0 total_tokens = 0 if hasattr(response, "usage"): if hasattr(response.usage, "prompt_tokens") and isinstance( response.usage.prompt_tokens, int ): prompt_tokens = response.usage.prompt_tokens if hasattr(response.usage, "total_tokens") and isinstance( response.usage.total_tokens, int ): total_tokens = response.usage.total_tokens if prompt_tokens == 0: prompt_tokens = integration.count_tokens(kwargs["input"] or "") record_token_usage(span, prompt_tokens, None, total_tokens or prompt_tokens) return response def _wrap_embeddings_create(f): # type: (Any) -> Any def _execute_sync(f, *args, **kwargs): # type: (Any, *Any, **Any) -> Any gen = _new_embeddings_create_common(f, *args, **kwargs) try: f, args, kwargs = next(gen) except StopIteration as e: return e.value try: try: result = f(*args, **kwargs) except Exception as e: _capture_exception(e) raise e from None return gen.send(result) except StopIteration as e: return e.value @wraps(f) def _sentry_patched_create_sync(*args, **kwargs): # type: (*Any, **Any) -> Any integration = sentry_sdk.get_client().get_integration(OpenAIIntegration) if integration is None: return f(*args, **kwargs) return _execute_sync(f, *args, **kwargs) return _sentry_patched_create_sync def _wrap_async_embeddings_create(f): # type: (Any) -> Any async def _execute_async(f, *args, **kwargs): # type: (Any, *Any, **Any) -> Any gen = _new_embeddings_create_common(f, *args, **kwargs) try: f, args, kwargs = next(gen) except StopIteration as e: return await e.value try: try: result = await f(*args, **kwargs) except Exception as e: _capture_exception(e) raise e from None return gen.send(result) except StopIteration as e: return e.value @wraps(f) async def _sentry_patched_create_async(*args, **kwargs): # type: (*Any, **Any) -> Any integration = sentry_sdk.get_client().get_integration(OpenAIIntegration) if integration is None: return await f(*args, **kwargs) return await _execute_async(f, *args, **kwargs) return _sentry_patched_create_async sentry-python-2.18.0/sentry_sdk/integrations/openfeature.py000066400000000000000000000026621471214654000242410ustar00rootroot00000000000000from typing import TYPE_CHECKING import sentry_sdk from sentry_sdk.integrations import 
DidNotEnable, Integration from sentry_sdk.flag_utils import flag_error_processor try: from openfeature import api from openfeature.hook import Hook if TYPE_CHECKING: from openfeature.flag_evaluation import FlagEvaluationDetails from openfeature.hook import HookContext, HookHints except ImportError: raise DidNotEnable("OpenFeature is not installed") class OpenFeatureIntegration(Integration): identifier = "openfeature" @staticmethod def setup_once(): # type: () -> None scope = sentry_sdk.get_current_scope() scope.add_error_processor(flag_error_processor) # Register the hook within the global openfeature hooks list. api.add_hooks(hooks=[OpenFeatureHook()]) class OpenFeatureHook(Hook): def after(self, hook_context, details, hints): # type: (HookContext, FlagEvaluationDetails[bool], HookHints) -> None if isinstance(details.value, bool): flags = sentry_sdk.get_current_scope().flags flags.set(details.flag_key, details.value) def error(self, hook_context, exception, hints): # type: (HookContext, Exception, HookHints) -> None if isinstance(hook_context.default_value, bool): flags = sentry_sdk.get_current_scope().flags flags.set(hook_context.flag_key, hook_context.default_value) sentry-python-2.18.0/sentry_sdk/integrations/opentelemetry/000077500000000000000000000000001471214654000242405ustar00rootroot00000000000000sentry-python-2.18.0/sentry_sdk/integrations/opentelemetry/__init__.py000066400000000000000000000003451471214654000263530ustar00rootroot00000000000000from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator __all__ = [ "SentryPropagator", "SentrySpanProcessor", ] sentry-python-2.18.0/sentry_sdk/integrations/opentelemetry/consts.py000066400000000000000000000002171471214654000261230ustar00rootroot00000000000000from opentelemetry.context import create_key SENTRY_TRACE_KEY = create_key("sentry-trace") SENTRY_BAGGAGE_KEY = create_key("sentry-baggage") sentry-python-2.18.0/sentry_sdk/integrations/opentelemetry/integration.py000066400000000000000000000033771471214654000271470ustar00rootroot00000000000000""" IMPORTANT: The contents of this file are part of a proof of concept and as such are experimental and not suitable for production use. They may be changed or removed at any time without prior notice. """ from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor from sentry_sdk.utils import logger try: from opentelemetry import trace from opentelemetry.propagate import set_global_textmap from opentelemetry.sdk.trace import TracerProvider except ImportError: raise DidNotEnable("opentelemetry not installed") try: from opentelemetry.instrumentation.django import DjangoInstrumentor # type: ignore[import-not-found] except ImportError: DjangoInstrumentor = None CONFIGURABLE_INSTRUMENTATIONS = { DjangoInstrumentor: {"is_sql_commentor_enabled": True}, } class OpenTelemetryIntegration(Integration): identifier = "opentelemetry" @staticmethod def setup_once(): # type: () -> None logger.warning( "[OTel] Initializing highly experimental OpenTelemetry support. " "Use at your own risk." 
) _setup_sentry_tracing() # _setup_instrumentors() logger.debug("[OTel] Finished setting up OpenTelemetry integration") def _setup_sentry_tracing(): # type: () -> None provider = TracerProvider() provider.add_span_processor(SentrySpanProcessor()) trace.set_tracer_provider(provider) set_global_textmap(SentryPropagator()) def _setup_instrumentors(): # type: () -> None for instrumentor, kwargs in CONFIGURABLE_INSTRUMENTATIONS.items(): instrumentor().instrument(**kwargs) sentry-python-2.18.0/sentry_sdk/integrations/opentelemetry/propagator.py000066400000000000000000000072101471214654000267700ustar00rootroot00000000000000from opentelemetry import trace from opentelemetry.context import ( Context, get_current, set_value, ) from opentelemetry.propagators.textmap import ( CarrierT, Getter, Setter, TextMapPropagator, default_getter, default_setter, ) from opentelemetry.trace import ( NonRecordingSpan, SpanContext, TraceFlags, ) from sentry_sdk.integrations.opentelemetry.consts import ( SENTRY_BAGGAGE_KEY, SENTRY_TRACE_KEY, ) from sentry_sdk.integrations.opentelemetry.span_processor import ( SentrySpanProcessor, ) from sentry_sdk.tracing import ( BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME, ) from sentry_sdk.tracing_utils import Baggage, extract_sentrytrace_data from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Optional, Set class SentryPropagator(TextMapPropagator): """ Propagates tracing headers for Sentry's tracing system in a way OTel understands. """ def extract(self, carrier, context=None, getter=default_getter): # type: (CarrierT, Optional[Context], Getter[CarrierT]) -> Context if context is None: context = get_current() sentry_trace = getter.get(carrier, SENTRY_TRACE_HEADER_NAME) if not sentry_trace: return context sentrytrace = extract_sentrytrace_data(sentry_trace[0]) if not sentrytrace: return context context = set_value(SENTRY_TRACE_KEY, sentrytrace, context) trace_id, span_id = sentrytrace["trace_id"], sentrytrace["parent_span_id"] span_context = SpanContext( trace_id=int(trace_id, 16), # type: ignore span_id=int(span_id, 16), # type: ignore # we simulate a sampled trace on the otel side and leave the sampling to sentry trace_flags=TraceFlags(TraceFlags.SAMPLED), is_remote=True, ) baggage_header = getter.get(carrier, BAGGAGE_HEADER_NAME) if baggage_header: baggage = Baggage.from_incoming_header(baggage_header[0]) else: # If there's an incoming sentry-trace but no incoming baggage header, # for instance in traces coming from older SDKs, # baggage will be empty and frozen and won't be populated as head SDK. 
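            #
            # Illustration of the two incoming shapes this branch
            # distinguishes (header values are made up):
            #
            #   sentry-trace: 771a43a4192642f0b136d5159a501700-9f91ceb4a1c34e3f-1
            #   baggage: sentry-trace_id=771a43a4192642f0b136d5159a501700,
            #            sentry-sample_rate=0.5,sentry-environment=production
            #
            # With only sentry-trace present (e.g. an older upstream SDK),
            # the frozen empty Baggage below keeps this SDK from populating
            # dynamic sampling context as if it were the head of the trace.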
baggage = Baggage(sentry_items={}) baggage.freeze() context = set_value(SENTRY_BAGGAGE_KEY, baggage, context) span = NonRecordingSpan(span_context) modified_context = trace.set_span_in_context(span, context) return modified_context def inject(self, carrier, context=None, setter=default_setter): # type: (CarrierT, Optional[Context], Setter[CarrierT]) -> None if context is None: context = get_current() current_span = trace.get_current_span(context) current_span_context = current_span.get_span_context() if not current_span_context.is_valid: return span_id = trace.format_span_id(current_span_context.span_id) span_map = SentrySpanProcessor().otel_span_map sentry_span = span_map.get(span_id, None) if not sentry_span: return setter.set(carrier, SENTRY_TRACE_HEADER_NAME, sentry_span.to_traceparent()) if sentry_span.containing_transaction: baggage = sentry_span.containing_transaction.get_baggage() if baggage: baggage_data = baggage.serialize() if baggage_data: setter.set(carrier, BAGGAGE_HEADER_NAME, baggage_data) @property def fields(self): # type: () -> Set[str] return {SENTRY_TRACE_HEADER_NAME, BAGGAGE_HEADER_NAME} sentry-python-2.18.0/sentry_sdk/integrations/opentelemetry/span_processor.py000066400000000000000000000317341471214654000276620ustar00rootroot00000000000000from datetime import datetime, timezone from time import time from typing import TYPE_CHECKING, cast from opentelemetry.context import get_value from opentelemetry.sdk.trace import SpanProcessor, ReadableSpan as OTelSpan from opentelemetry.semconv.trace import SpanAttributes from opentelemetry.trace import ( format_span_id, format_trace_id, get_current_span, SpanKind, ) from opentelemetry.trace.span import ( INVALID_SPAN_ID, INVALID_TRACE_ID, ) from sentry_sdk import get_client, start_transaction from sentry_sdk.consts import INSTRUMENTER, SPANSTATUS from sentry_sdk.integrations.opentelemetry.consts import ( SENTRY_BAGGAGE_KEY, SENTRY_TRACE_KEY, ) from sentry_sdk.scope import add_global_event_processor from sentry_sdk.tracing import Transaction, Span as SentrySpan from sentry_sdk.utils import Dsn from urllib3.util import parse_url as urlparse if TYPE_CHECKING: from typing import Any, Optional, Union from opentelemetry import context as context_api from sentry_sdk._types import Event, Hint OPEN_TELEMETRY_CONTEXT = "otel" SPAN_MAX_TIME_OPEN_MINUTES = 10 SPAN_ORIGIN = "auto.otel" def link_trace_context_to_error_event(event, otel_span_map): # type: (Event, dict[str, Union[Transaction, SentrySpan]]) -> Event client = get_client() if client.options["instrumenter"] != INSTRUMENTER.OTEL: return event if hasattr(event, "type") and event["type"] == "transaction": return event otel_span = get_current_span() if not otel_span: return event ctx = otel_span.get_span_context() if ctx.trace_id == INVALID_TRACE_ID or ctx.span_id == INVALID_SPAN_ID: return event sentry_span = otel_span_map.get(format_span_id(ctx.span_id), None) if not sentry_span: return event contexts = event.setdefault("contexts", {}) contexts.setdefault("trace", {}).update(sentry_span.get_trace_context()) return event class SentrySpanProcessor(SpanProcessor): """ Converts OTel spans into Sentry spans so they can be sent to the Sentry backend. """ # The mapping from otel span ids to sentry spans otel_span_map = {} # type: dict[str, Union[Transaction, SentrySpan]] # The currently open spans. 
Elements will be discarded after SPAN_MAX_TIME_OPEN_MINUTES open_spans = {} # type: dict[int, set[str]] def __new__(cls): # type: () -> SentrySpanProcessor if not hasattr(cls, "instance"): cls.instance = super().__new__(cls) return cls.instance def __init__(self): # type: () -> None @add_global_event_processor def global_event_processor(event, hint): # type: (Event, Hint) -> Event return link_trace_context_to_error_event(event, self.otel_span_map) def _prune_old_spans(self): # type: (SentrySpanProcessor) -> None """ Prune spans that have been open for too long. """ current_time_minutes = int(time() / 60) for span_start_minutes in list( self.open_spans.keys() ): # making a list because we change the dict # prune empty open spans buckets if self.open_spans[span_start_minutes] == set(): self.open_spans.pop(span_start_minutes) # prune old buckets elif current_time_minutes - span_start_minutes > SPAN_MAX_TIME_OPEN_MINUTES: for span_id in self.open_spans.pop(span_start_minutes): self.otel_span_map.pop(span_id, None) def on_start(self, otel_span, parent_context=None): # type: (OTelSpan, Optional[context_api.Context]) -> None client = get_client() if not client.dsn: return try: _ = Dsn(client.dsn) except Exception: return if client.options["instrumenter"] != INSTRUMENTER.OTEL: return if not otel_span.get_span_context().is_valid: return if self._is_sentry_span(otel_span): return trace_data = self._get_trace_data(otel_span, parent_context) parent_span_id = trace_data["parent_span_id"] sentry_parent_span = ( self.otel_span_map.get(parent_span_id) if parent_span_id else None ) start_timestamp = None if otel_span.start_time is not None: start_timestamp = datetime.fromtimestamp( otel_span.start_time / 1e9, timezone.utc ) # OTel spans have nanosecond precision sentry_span = None if sentry_parent_span: sentry_span = sentry_parent_span.start_child( span_id=trace_data["span_id"], name=otel_span.name, start_timestamp=start_timestamp, instrumenter=INSTRUMENTER.OTEL, origin=SPAN_ORIGIN, ) else: sentry_span = start_transaction( name=otel_span.name, span_id=trace_data["span_id"], parent_span_id=parent_span_id, trace_id=trace_data["trace_id"], baggage=trace_data["baggage"], start_timestamp=start_timestamp, instrumenter=INSTRUMENTER.OTEL, origin=SPAN_ORIGIN, ) self.otel_span_map[trace_data["span_id"]] = sentry_span if otel_span.start_time is not None: span_start_in_minutes = int( otel_span.start_time / 1e9 / 60 ) # OTel spans have nanosecond precision self.open_spans.setdefault(span_start_in_minutes, set()).add( trace_data["span_id"] ) self._prune_old_spans() def on_end(self, otel_span): # type: (OTelSpan) -> None client = get_client() if client.options["instrumenter"] != INSTRUMENTER.OTEL: return span_context = otel_span.get_span_context() if not span_context.is_valid: return span_id = format_span_id(span_context.span_id) sentry_span = self.otel_span_map.pop(span_id, None) if not sentry_span: return sentry_span.op = otel_span.name self._update_span_with_otel_status(sentry_span, otel_span) if isinstance(sentry_span, Transaction): sentry_span.name = otel_span.name sentry_span.set_context( OPEN_TELEMETRY_CONTEXT, self._get_otel_context(otel_span) ) self._update_transaction_with_otel_data(sentry_span, otel_span) else: self._update_span_with_otel_data(sentry_span, otel_span) end_timestamp = None if otel_span.end_time is not None: end_timestamp = datetime.fromtimestamp( otel_span.end_time / 1e9, timezone.utc ) # OTel spans have nanosecond precision sentry_span.finish(end_timestamp=end_timestamp) if otel_span.start_time 
is not None: span_start_in_minutes = int( otel_span.start_time / 1e9 / 60 ) # OTel spans have nanosecond precision self.open_spans.setdefault(span_start_in_minutes, set()).discard(span_id) self._prune_old_spans() def _is_sentry_span(self, otel_span): # type: (OTelSpan) -> bool """ Break infinite loop: HTTP requests to Sentry are caught by OTel and send again to Sentry. """ otel_span_url = None if otel_span.attributes is not None: otel_span_url = otel_span.attributes.get(SpanAttributes.HTTP_URL) otel_span_url = cast("Optional[str]", otel_span_url) dsn_url = None client = get_client() if client.dsn: dsn_url = Dsn(client.dsn).netloc if otel_span_url and dsn_url and dsn_url in otel_span_url: return True return False def _get_otel_context(self, otel_span): # type: (OTelSpan) -> dict[str, Any] """ Returns the OTel context for Sentry. See: https://develop.sentry.dev/sdk/performance/opentelemetry/#step-5-add-opentelemetry-context """ ctx = {} if otel_span.attributes: ctx["attributes"] = dict(otel_span.attributes) if otel_span.resource.attributes: ctx["resource"] = dict(otel_span.resource.attributes) return ctx def _get_trace_data(self, otel_span, parent_context): # type: (OTelSpan, Optional[context_api.Context]) -> dict[str, Any] """ Extracts tracing information from one OTel span and its parent OTel context. """ trace_data = {} # type: dict[str, Any] span_context = otel_span.get_span_context() span_id = format_span_id(span_context.span_id) trace_data["span_id"] = span_id trace_id = format_trace_id(span_context.trace_id) trace_data["trace_id"] = trace_id parent_span_id = ( format_span_id(otel_span.parent.span_id) if otel_span.parent else None ) trace_data["parent_span_id"] = parent_span_id sentry_trace_data = get_value(SENTRY_TRACE_KEY, parent_context) sentry_trace_data = cast("dict[str, Union[str, bool, None]]", sentry_trace_data) trace_data["parent_sampled"] = ( sentry_trace_data["parent_sampled"] if sentry_trace_data else None ) baggage = get_value(SENTRY_BAGGAGE_KEY, parent_context) trace_data["baggage"] = baggage return trace_data def _update_span_with_otel_status(self, sentry_span, otel_span): # type: (SentrySpan, OTelSpan) -> None """ Set the Sentry span status from the OTel span """ if otel_span.status.is_unset: return if otel_span.status.is_ok: sentry_span.set_status(SPANSTATUS.OK) return sentry_span.set_status(SPANSTATUS.INTERNAL_ERROR) def _update_span_with_otel_data(self, sentry_span, otel_span): # type: (SentrySpan, OTelSpan) -> None """ Convert OTel span data and update the Sentry span with it. This should eventually happen on the server when ingesting the spans. 
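
        For illustration (values are made up): an OTel CLIENT span carrying
        ``http.method = "GET"`` and
        ``http.url = "https://example.com/api/users?page=2"`` becomes a
        Sentry span with ``op = "http.client"`` and
        ``description = "GET https://example.com/api/users"`` (scheme, host
        and path are kept; the query string is dropped).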
""" sentry_span.set_data("otel.kind", otel_span.kind) op = otel_span.name description = otel_span.name if otel_span.attributes is not None: for key, val in otel_span.attributes.items(): sentry_span.set_data(key, val) http_method = otel_span.attributes.get(SpanAttributes.HTTP_METHOD) http_method = cast("Optional[str]", http_method) db_query = otel_span.attributes.get(SpanAttributes.DB_SYSTEM) if http_method: op = "http" if otel_span.kind == SpanKind.SERVER: op += ".server" elif otel_span.kind == SpanKind.CLIENT: op += ".client" description = http_method peer_name = otel_span.attributes.get(SpanAttributes.NET_PEER_NAME, None) if peer_name: description += " {}".format(peer_name) target = otel_span.attributes.get(SpanAttributes.HTTP_TARGET, None) if target: description += " {}".format(target) if not peer_name and not target: url = otel_span.attributes.get(SpanAttributes.HTTP_URL, None) url = cast("Optional[str]", url) if url: parsed_url = urlparse(url) url = "{}://{}{}".format( parsed_url.scheme, parsed_url.netloc, parsed_url.path ) description += " {}".format(url) status_code = otel_span.attributes.get( SpanAttributes.HTTP_STATUS_CODE, None ) status_code = cast("Optional[int]", status_code) if status_code: sentry_span.set_http_status(status_code) elif db_query: op = "db" statement = otel_span.attributes.get(SpanAttributes.DB_STATEMENT, None) statement = cast("Optional[str]", statement) if statement: description = statement sentry_span.op = op sentry_span.description = description def _update_transaction_with_otel_data(self, sentry_span, otel_span): # type: (SentrySpan, OTelSpan) -> None if otel_span.attributes is None: return http_method = otel_span.attributes.get(SpanAttributes.HTTP_METHOD) if http_method: status_code = otel_span.attributes.get(SpanAttributes.HTTP_STATUS_CODE) status_code = cast("Optional[int]", status_code) if status_code: sentry_span.set_http_status(status_code) op = "http" if otel_span.kind == SpanKind.SERVER: op += ".server" elif otel_span.kind == SpanKind.CLIENT: op += ".client" sentry_span.op = op sentry-python-2.18.0/sentry_sdk/integrations/pure_eval.py000066400000000000000000000107451471214654000237070ustar00rootroot00000000000000import ast import sentry_sdk from sentry_sdk import serializer from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.scope import add_global_event_processor from sentry_sdk.utils import walk_exception_chain, iter_stacks from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Optional, Dict, Any, Tuple, List from types import FrameType from sentry_sdk._types import Event, Hint try: import executing except ImportError: raise DidNotEnable("executing is not installed") try: import pure_eval except ImportError: raise DidNotEnable("pure_eval is not installed") try: # Used implicitly, just testing it's available import asttokens # noqa except ImportError: raise DidNotEnable("asttokens is not installed") class PureEvalIntegration(Integration): identifier = "pure_eval" @staticmethod def setup_once(): # type: () -> None @add_global_event_processor def add_executing_info(event, hint): # type: (Event, Optional[Hint]) -> Optional[Event] if sentry_sdk.get_client().get_integration(PureEvalIntegration) is None: return event if hint is None: return event exc_info = hint.get("exc_info", None) if exc_info is None: return event exception = event.get("exception", None) if exception is None: return event values = exception.get("values", None) if values is None: return event for exception, (_exc_type, _exc_value, exc_tb) in 
zip( reversed(values), walk_exception_chain(exc_info) ): sentry_frames = [ frame for frame in exception.get("stacktrace", {}).get("frames", []) if frame.get("function") ] tbs = list(iter_stacks(exc_tb)) if len(sentry_frames) != len(tbs): continue for sentry_frame, tb in zip(sentry_frames, tbs): sentry_frame["vars"] = ( pure_eval_frame(tb.tb_frame) or sentry_frame["vars"] ) return event def pure_eval_frame(frame): # type: (FrameType) -> Dict[str, Any] source = executing.Source.for_frame(frame) if not source.tree: return {} statements = source.statements_at_line(frame.f_lineno) if not statements: return {} scope = stmt = list(statements)[0] while True: # Get the parent first in case the original statement is already # a function definition, e.g. if we're calling a decorator # In that case we still want the surrounding scope, not that function scope = scope.parent if isinstance(scope, (ast.FunctionDef, ast.ClassDef, ast.Module)): break evaluator = pure_eval.Evaluator.from_frame(frame) expressions = evaluator.interesting_expressions_grouped(scope) def closeness(expression): # type: (Tuple[List[Any], Any]) -> Tuple[int, int] # Prioritise expressions with a node closer to the statement executed # without being after that statement # A higher return value is better - the expression will appear # earlier in the list of values and is less likely to be trimmed nodes, _value = expression def start(n): # type: (ast.expr) -> Tuple[int, int] return (n.lineno, n.col_offset) nodes_before_stmt = [ node for node in nodes if start(node) < stmt.last_token.end # type: ignore ] if nodes_before_stmt: # The position of the last node before or in the statement return max(start(node) for node in nodes_before_stmt) else: # The position of the first node after the statement # Negative means it's always lower priority than nodes that come before # Less negative means closer to the statement and higher priority lineno, col_offset = min(start(node) for node in nodes) return (-lineno, -col_offset) # This adds the first_token and last_token attributes to nodes atok = source.asttokens() expressions.sort(key=closeness, reverse=True) vars = { atok.get_text(nodes[0]): value for nodes, value in expressions[: serializer.MAX_DATABAG_BREADTH] } return serializer.serialize(vars, is_vars=True) sentry-python-2.18.0/sentry_sdk/integrations/pymongo.py000066400000000000000000000143541471214654000234150ustar00rootroot00000000000000import copy import json import sentry_sdk from sentry_sdk.consts import SPANSTATUS, SPANDATA, OP from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import Span from sentry_sdk.utils import capture_internal_exceptions try: from pymongo import monitoring except ImportError: raise DidNotEnable("Pymongo not installed") from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any, Dict, Union from pymongo.monitoring import ( CommandFailedEvent, CommandStartedEvent, CommandSucceededEvent, ) SAFE_COMMAND_ATTRIBUTES = [ "insert", "ordered", "find", "limit", "singleBatch", "aggregate", "createIndexes", "indexes", "delete", "findAndModify", "renameCollection", "to", "drop", ] def _strip_pii(command): # type: (Dict[str, Any]) -> Dict[str, Any] for key in command: is_safe_field = key in SAFE_COMMAND_ATTRIBUTES if is_safe_field: # Skip if safe key continue update_db_command = key == "update" and "findAndModify" not in command if update_db_command: # Also skip "update" db command because it is save. 
# There is also an "update" key in the "findAndModify" command, which is NOT safe! continue # Special stripping for documents is_document = key == "documents" if is_document: for doc in command[key]: for doc_key in doc: doc[doc_key] = "%s" continue # Special stripping for dict style fields is_dict_field = key in ["filter", "query", "update"] if is_dict_field: for item_key in command[key]: command[key][item_key] = "%s" continue # For pipeline fields strip the `$match` dict is_pipeline_field = key == "pipeline" if is_pipeline_field: for pipeline in command[key]: for match_key in pipeline["$match"] if "$match" in pipeline else []: pipeline["$match"][match_key] = "%s" continue # Default stripping command[key] = "%s" return command def _get_db_data(event): # type: (Any) -> Dict[str, Any] data = {} data[SPANDATA.DB_SYSTEM] = "mongodb" db_name = event.database_name if db_name is not None: data[SPANDATA.DB_NAME] = db_name server_address = event.connection_id[0] if server_address is not None: data[SPANDATA.SERVER_ADDRESS] = server_address server_port = event.connection_id[1] if server_port is not None: data[SPANDATA.SERVER_PORT] = server_port return data class CommandTracer(monitoring.CommandListener): def __init__(self): # type: () -> None self._ongoing_operations = {} # type: Dict[int, Span] def _operation_key(self, event): # type: (Union[CommandFailedEvent, CommandStartedEvent, CommandSucceededEvent]) -> int return event.request_id def started(self, event): # type: (CommandStartedEvent) -> None if sentry_sdk.get_client().get_integration(PyMongoIntegration) is None: return with capture_internal_exceptions(): command = dict(copy.deepcopy(event.command)) command.pop("$db", None) command.pop("$clusterTime", None) command.pop("$signature", None) tags = { "db.name": event.database_name, SPANDATA.DB_SYSTEM: "mongodb", SPANDATA.DB_OPERATION: event.command_name, SPANDATA.DB_MONGODB_COLLECTION: command.get(event.command_name), } try: tags["net.peer.name"] = event.connection_id[0] tags["net.peer.port"] = str(event.connection_id[1]) except TypeError: pass data = {"operation_ids": {}} # type: Dict[str, Any] data["operation_ids"]["operation"] = event.operation_id data["operation_ids"]["request"] = event.request_id data.update(_get_db_data(event)) try: lsid = command.pop("lsid")["id"] data["operation_ids"]["session"] = str(lsid) except KeyError: pass if not should_send_default_pii(): command = _strip_pii(command) query = json.dumps(command, default=str) span = sentry_sdk.start_span( op=OP.DB, name=query, origin=PyMongoIntegration.origin, ) for tag, value in tags.items(): # set the tag for backwards-compatibility. # TODO: remove the set_tag call in the next major release! 
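            # Illustration of the resulting tags/data for a simple query
            # (values are made up; the keys are the string values of the
            # SPANDATA constants used above):
            #
            #     {"db.name": "my_db", "db.system": "mongodb",
            #      "db.operation": "find", "db.mongodb.collection": "users",
            #      "net.peer.name": "localhost", "net.peer.port": "27017"}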
span.set_tag(tag, value) span.set_data(tag, value) for key, value in data.items(): span.set_data(key, value) with capture_internal_exceptions(): sentry_sdk.add_breadcrumb( message=query, category="query", type=OP.DB, data=tags ) self._ongoing_operations[self._operation_key(event)] = span.__enter__() def failed(self, event): # type: (CommandFailedEvent) -> None if sentry_sdk.get_client().get_integration(PyMongoIntegration) is None: return try: span = self._ongoing_operations.pop(self._operation_key(event)) span.set_status(SPANSTATUS.INTERNAL_ERROR) span.__exit__(None, None, None) except KeyError: return def succeeded(self, event): # type: (CommandSucceededEvent) -> None if sentry_sdk.get_client().get_integration(PyMongoIntegration) is None: return try: span = self._ongoing_operations.pop(self._operation_key(event)) span.set_status(SPANSTATUS.OK) span.__exit__(None, None, None) except KeyError: pass class PyMongoIntegration(Integration): identifier = "pymongo" origin = f"auto.db.{identifier}" @staticmethod def setup_once(): # type: () -> None monitoring.register(CommandTracer()) sentry-python-2.18.0/sentry_sdk/integrations/pyramid.py000066400000000000000000000163041471214654000233670ustar00rootroot00000000000000import functools import os import sys import weakref import sentry_sdk from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations._wsgi_common import RequestExtractor from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, event_from_exception, reraise, ) try: from pyramid.httpexceptions import HTTPException from pyramid.request import Request except ImportError: raise DidNotEnable("Pyramid not installed") from typing import TYPE_CHECKING if TYPE_CHECKING: from pyramid.response import Response from typing import Any from sentry_sdk.integrations.wsgi import _ScopedResponse from typing import Callable from typing import Dict from typing import Optional from webob.cookies import RequestCookies from webob.request import _FieldStorageWithFile from sentry_sdk.utils import ExcInfo from sentry_sdk._types import Event, EventProcessor if getattr(Request, "authenticated_userid", None): def authenticated_userid(request): # type: (Request) -> Optional[Any] return request.authenticated_userid else: # bw-compat for pyramid < 1.5 from pyramid.security import authenticated_userid # type: ignore TRANSACTION_STYLE_VALUES = ("route_name", "route_pattern") class PyramidIntegration(Integration): identifier = "pyramid" origin = f"auto.http.{identifier}" transaction_style = "" def __init__(self, transaction_style="route_name"): # type: (str) -> None if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" % (transaction_style, TRANSACTION_STYLE_VALUES) ) self.transaction_style = transaction_style @staticmethod def setup_once(): # type: () -> None from pyramid import router old_call_view = router._call_view @functools.wraps(old_call_view) def sentry_patched_call_view(registry, request, *args, **kwargs): # type: (Any, Request, *Any, **Any) -> Response integration = sentry_sdk.get_client().get_integration(PyramidIntegration) if integration is None: return old_call_view(registry, request, *args, **kwargs) _set_transaction_name_and_source( sentry_sdk.get_current_scope(), integration.transaction_style, request ) scope = 
sentry_sdk.get_isolation_scope() scope.add_event_processor( _make_event_processor(weakref.ref(request), integration) ) return old_call_view(registry, request, *args, **kwargs) router._call_view = sentry_patched_call_view if hasattr(Request, "invoke_exception_view"): old_invoke_exception_view = Request.invoke_exception_view def sentry_patched_invoke_exception_view(self, *args, **kwargs): # type: (Request, *Any, **Any) -> Any rv = old_invoke_exception_view(self, *args, **kwargs) if ( self.exc_info and all(self.exc_info) and rv.status_int == 500 and sentry_sdk.get_client().get_integration(PyramidIntegration) is not None ): _capture_exception(self.exc_info) return rv Request.invoke_exception_view = sentry_patched_invoke_exception_view old_wsgi_call = router.Router.__call__ @ensure_integration_enabled(PyramidIntegration, old_wsgi_call) def sentry_patched_wsgi_call(self, environ, start_response): # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse def sentry_patched_inner_wsgi_call(environ, start_response): # type: (Dict[str, Any], Callable[..., Any]) -> Any try: return old_wsgi_call(self, environ, start_response) except Exception: einfo = sys.exc_info() _capture_exception(einfo) reraise(*einfo) middleware = SentryWsgiMiddleware( sentry_patched_inner_wsgi_call, span_origin=PyramidIntegration.origin, ) return middleware(environ, start_response) router.Router.__call__ = sentry_patched_wsgi_call @ensure_integration_enabled(PyramidIntegration) def _capture_exception(exc_info): # type: (ExcInfo) -> None if exc_info[0] is None or issubclass(exc_info[0], HTTPException): return event, hint = event_from_exception( exc_info, client_options=sentry_sdk.get_client().options, mechanism={"type": "pyramid", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) def _set_transaction_name_and_source(scope, transaction_style, request): # type: (sentry_sdk.Scope, str, Request) -> None try: name_for_style = { "route_name": request.matched_route.name, "route_pattern": request.matched_route.pattern, } scope.set_transaction_name( name_for_style[transaction_style], source=SOURCE_FOR_STYLE[transaction_style], ) except Exception: pass class PyramidRequestExtractor(RequestExtractor): def url(self): # type: () -> str return self.request.path_url def env(self): # type: () -> Dict[str, str] return self.request.environ def cookies(self): # type: () -> RequestCookies return self.request.cookies def raw_data(self): # type: () -> str return self.request.text def form(self): # type: () -> Dict[str, str] return { key: value for key, value in self.request.POST.items() if not getattr(value, "filename", None) } def files(self): # type: () -> Dict[str, _FieldStorageWithFile] return { key: value for key, value in self.request.POST.items() if getattr(value, "filename", None) } def size_of_file(self, postdata): # type: (_FieldStorageWithFile) -> int file = postdata.file try: return os.fstat(file.fileno()).st_size except Exception: return 0 def _make_event_processor(weak_request, integration): # type: (Callable[[], Request], PyramidIntegration) -> EventProcessor def pyramid_event_processor(event, hint): # type: (Event, Dict[str, Any]) -> Event request = weak_request() if request is None: return event with capture_internal_exceptions(): PyramidRequestExtractor(request).extract_into_event(event) if should_send_default_pii(): with capture_internal_exceptions(): user_info = event.setdefault("user", {}) user_info.setdefault("id", authenticated_userid(request)) return event return pyramid_event_processor 
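

# Usage sketch (illustrative, not part of this module; the DSN is a
# placeholder):
#
#     import sentry_sdk
#     from pyramid.config import Configurator
#     from sentry_sdk.integrations.pyramid import PyramidIntegration
#
#     sentry_sdk.init(
#         dsn="https://<key>@<org>.ingest.sentry.io/<project>",
#         integrations=[PyramidIntegration(transaction_style="route_pattern")],
#     )
#
#     with Configurator() as config:
#         config.add_route("hello", "/hello/{name}")
#         config.scan()
#         app = config.make_wsgi_app()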
sentry-python-2.18.0/sentry_sdk/integrations/quart.py000066400000000000000000000164151471214654000230610ustar00rootroot00000000000000import asyncio import inspect from functools import wraps import sentry_sdk from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations._wsgi_common import _filter_headers from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, event_from_exception, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Union from sentry_sdk._types import Event, EventProcessor try: import quart_auth # type: ignore except ImportError: quart_auth = None try: from quart import ( # type: ignore has_request_context, has_websocket_context, Request, Quart, request, websocket, ) from quart.signals import ( # type: ignore got_background_exception, got_request_exception, got_websocket_exception, request_started, websocket_started, ) except ImportError: raise DidNotEnable("Quart is not installed") else: # Quart 0.19 is based on Flask and hence no longer has a Scaffold try: from quart.scaffold import Scaffold # type: ignore except ImportError: from flask.sansio.scaffold import Scaffold # type: ignore TRANSACTION_STYLE_VALUES = ("endpoint", "url") class QuartIntegration(Integration): identifier = "quart" origin = f"auto.http.{identifier}" transaction_style = "" def __init__(self, transaction_style="endpoint"): # type: (str) -> None if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" % (transaction_style, TRANSACTION_STYLE_VALUES) ) self.transaction_style = transaction_style @staticmethod def setup_once(): # type: () -> None request_started.connect(_request_websocket_started) websocket_started.connect(_request_websocket_started) got_background_exception.connect(_capture_exception) got_request_exception.connect(_capture_exception) got_websocket_exception.connect(_capture_exception) patch_asgi_app() patch_scaffold_route() def patch_asgi_app(): # type: () -> None old_app = Quart.__call__ async def sentry_patched_asgi_app(self, scope, receive, send): # type: (Any, Any, Any, Any) -> Any if sentry_sdk.get_client().get_integration(QuartIntegration) is None: return await old_app(self, scope, receive, send) middleware = SentryAsgiMiddleware( lambda *a, **kw: old_app(self, *a, **kw), span_origin=QuartIntegration.origin, ) middleware.__call__ = middleware._run_asgi3 return await middleware(scope, receive, send) Quart.__call__ = sentry_patched_asgi_app def patch_scaffold_route(): # type: () -> None old_route = Scaffold.route def _sentry_route(*args, **kwargs): # type: (*Any, **Any) -> Any old_decorator = old_route(*args, **kwargs) def decorator(old_func): # type: (Any) -> Any if inspect.isfunction(old_func) and not asyncio.iscoroutinefunction( old_func ): @wraps(old_func) @ensure_integration_enabled(QuartIntegration, old_func) def _sentry_func(*args, **kwargs): # type: (*Any, **Any) -> Any current_scope = sentry_sdk.get_current_scope() if current_scope.transaction is not None: current_scope.transaction.update_active_thread() sentry_scope = sentry_sdk.get_isolation_scope() if sentry_scope.profile is not None: sentry_scope.profile.update_active_thread_id() return old_func(*args, **kwargs) return old_decorator(_sentry_func) return old_decorator(old_func) return 
decorator Scaffold.route = _sentry_route def _set_transaction_name_and_source(scope, transaction_style, request): # type: (sentry_sdk.Scope, str, Request) -> None try: name_for_style = { "url": request.url_rule.rule, "endpoint": request.url_rule.endpoint, } scope.set_transaction_name( name_for_style[transaction_style], source=SOURCE_FOR_STYLE[transaction_style], ) except Exception: pass async def _request_websocket_started(app, **kwargs): # type: (Quart, **Any) -> None integration = sentry_sdk.get_client().get_integration(QuartIntegration) if integration is None: return if has_request_context(): request_websocket = request._get_current_object() if has_websocket_context(): request_websocket = websocket._get_current_object() # Set the transaction name here, but rely on ASGI middleware # to actually start the transaction _set_transaction_name_and_source( sentry_sdk.get_current_scope(), integration.transaction_style, request_websocket ) scope = sentry_sdk.get_isolation_scope() evt_processor = _make_request_event_processor(app, request_websocket, integration) scope.add_event_processor(evt_processor) def _make_request_event_processor(app, request, integration): # type: (Quart, Request, QuartIntegration) -> EventProcessor def inner(event, hint): # type: (Event, dict[str, Any]) -> Event # if the request is gone we are fine not logging the data from # it. This might happen if the processor is pushed away to # another thread. if request is None: return event with capture_internal_exceptions(): # TODO: Figure out what to do with request body. Methods on request # are async, but event processors are not. request_info = event.setdefault("request", {}) request_info["url"] = request.url request_info["query_string"] = request.query_string request_info["method"] = request.method request_info["headers"] = _filter_headers(dict(request.headers)) if should_send_default_pii(): request_info["env"] = {"REMOTE_ADDR": request.access_route[0]} _add_user_to_event(event) return event return inner async def _capture_exception(sender, exception, **kwargs): # type: (Quart, Union[ValueError, BaseException], **Any) -> None integration = sentry_sdk.get_client().get_integration(QuartIntegration) if integration is None: return event, hint = event_from_exception( exception, client_options=sentry_sdk.get_client().options, mechanism={"type": "quart", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) def _add_user_to_event(event): # type: (Event) -> None if quart_auth is None: return user = quart_auth.current_user if user is None: return with capture_internal_exceptions(): user_info = event.setdefault("user", {}) user_info["id"] = quart_auth.current_user._auth_id sentry-python-2.18.0/sentry_sdk/integrations/ray.py000066400000000000000000000103021471214654000225050ustar00rootroot00000000000000import inspect import sys import sentry_sdk from sentry_sdk.consts import OP, SPANSTATUS from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.tracing import TRANSACTION_SOURCE_TASK from sentry_sdk.utils import ( event_from_exception, logger, package_version, qualname_from_function, reraise, ) try: import ray # type: ignore[import-not-found] except ImportError: raise DidNotEnable("Ray not installed.") import functools from typing import TYPE_CHECKING if TYPE_CHECKING: from collections.abc import Callable from typing import Any, Optional from sentry_sdk.utils import ExcInfo def _check_sentry_initialized(): # type: () -> None if sentry_sdk.get_client().is_active(): return logger.debug( "[Tracing] Sentry not 
initialized in ray cluster worker, performance data will be discarded."
    )


def _patch_ray_remote():
    # type: () -> None
    old_remote = ray.remote

    @functools.wraps(old_remote)
    def new_remote(f, *args, **kwargs):
        # type: (Callable[..., Any], *Any, **Any) -> Callable[..., Any]
        if inspect.isclass(f):
            # Ray Actors
            # (https://docs.ray.io/en/latest/ray-core/actors.html)
            # are not supported
            # (Only Ray Tasks are supported)
            return old_remote(f, *args, **kwargs)

        def _f(*f_args, _tracing=None, **f_kwargs):
            # type: (Any, Optional[dict[str, Any]], Any) -> Any
            """
            Ray Worker
            """
            _check_sentry_initialized()

            transaction = sentry_sdk.continue_trace(
                _tracing or {},
                op=OP.QUEUE_TASK_RAY,
                name=qualname_from_function(f),
                origin=RayIntegration.origin,
                source=TRANSACTION_SOURCE_TASK,
            )

            with sentry_sdk.start_transaction(transaction) as transaction:
                try:
                    result = f(*f_args, **f_kwargs)
                    transaction.set_status(SPANSTATUS.OK)
                except Exception:
                    transaction.set_status(SPANSTATUS.INTERNAL_ERROR)
                    exc_info = sys.exc_info()
                    _capture_exception(exc_info)
                    reraise(*exc_info)

                return result

        rv = old_remote(_f, *args, **kwargs)
        old_remote_method = rv.remote

        def _remote_method_with_header_propagation(*args, **kwargs):
            # type: (*Any, **Any) -> Any
            """
            Ray Client
            """
            with sentry_sdk.start_span(
                op=OP.QUEUE_SUBMIT_RAY,
                name=qualname_from_function(f),
                origin=RayIntegration.origin,
            ) as span:
                tracing = {
                    k: v
                    for k, v in sentry_sdk.get_current_scope().iter_trace_propagation_headers()
                }
                try:
                    result = old_remote_method(*args, **kwargs, _tracing=tracing)
                    span.set_status(SPANSTATUS.OK)
                except Exception:
                    span.set_status(SPANSTATUS.INTERNAL_ERROR)
                    exc_info = sys.exc_info()
                    _capture_exception(exc_info)
                    reraise(*exc_info)

                return result

        rv.remote = _remote_method_with_header_propagation

        return rv

    ray.remote = new_remote


def _capture_exception(exc_info, **kwargs):
    # type: (ExcInfo, **Any) -> None
    client = sentry_sdk.get_client()

    event, hint = event_from_exception(
        exc_info,
        client_options=client.options,
        mechanism={
            "handled": False,
            "type": RayIntegration.identifier,
        },
    )
    sentry_sdk.capture_event(event, hint=hint)


class RayIntegration(Integration):
    identifier = "ray"
    origin = f"auto.queue.{identifier}"

    @staticmethod
    def setup_once():
        # type: () -> None
        version = package_version("ray")

        if version is None:
            raise DidNotEnable("Unparsable ray version: {}".format(version))

        if version < (2, 7, 0):
            raise DidNotEnable("Ray 2.7.0 or newer required")

        _patch_ray_remote()

sentry-python-2.18.0/sentry_sdk/integrations/redis/__init__.py

from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.integrations.redis.consts import _DEFAULT_MAX_DATA_SIZE
from sentry_sdk.integrations.redis.rb import _patch_rb
from sentry_sdk.integrations.redis.redis import _patch_redis
from sentry_sdk.integrations.redis.redis_cluster import _patch_redis_cluster
from sentry_sdk.integrations.redis.redis_py_cluster_legacy import _patch_rediscluster
from sentry_sdk.utils import logger

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Optional


class RedisIntegration(Integration):
    identifier = "redis"

    def __init__(self, max_data_size=_DEFAULT_MAX_DATA_SIZE, cache_prefixes=None):
        # type: (int, Optional[list[str]]) -> None
        self.max_data_size = max_data_size
        self.cache_prefixes = cache_prefixes if cache_prefixes is not None else []

    @staticmethod
    def
setup_once(): # type: () -> None try: from redis import StrictRedis, client except ImportError: raise DidNotEnable("Redis client not installed") _patch_redis(StrictRedis, client) _patch_redis_cluster() _patch_rb() try: _patch_rediscluster() except Exception: logger.exception("Error occurred while patching `rediscluster` library") sentry-python-2.18.0/sentry_sdk/integrations/redis/_async_common.py000066400000000000000000000073171471214654000256600ustar00rootroot00000000000000import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.integrations.redis.consts import SPAN_ORIGIN from sentry_sdk.integrations.redis.modules.caches import ( _compile_cache_span_properties, _set_cache_data, ) from sentry_sdk.integrations.redis.modules.queries import _compile_db_span_properties from sentry_sdk.integrations.redis.utils import ( _set_client_data, _set_pipeline_data, ) from sentry_sdk.tracing import Span from sentry_sdk.utils import capture_internal_exceptions from typing import TYPE_CHECKING if TYPE_CHECKING: from collections.abc import Callable from typing import Any, Union from redis.asyncio.client import Pipeline, StrictRedis from redis.asyncio.cluster import ClusterPipeline, RedisCluster def patch_redis_async_pipeline( pipeline_cls, is_cluster, get_command_args_fn, set_db_data_fn ): # type: (Union[type[Pipeline[Any]], type[ClusterPipeline[Any]]], bool, Any, Callable[[Span, Any], None]) -> None old_execute = pipeline_cls.execute from sentry_sdk.integrations.redis import RedisIntegration async def _sentry_execute(self, *args, **kwargs): # type: (Any, *Any, **Any) -> Any if sentry_sdk.get_client().get_integration(RedisIntegration) is None: return await old_execute(self, *args, **kwargs) with sentry_sdk.start_span( op=OP.DB_REDIS, name="redis.pipeline.execute", origin=SPAN_ORIGIN, ) as span: with capture_internal_exceptions(): set_db_data_fn(span, self) _set_pipeline_data( span, is_cluster, get_command_args_fn, False if is_cluster else self.is_transaction, self._command_stack if is_cluster else self.command_stack, ) return await old_execute(self, *args, **kwargs) pipeline_cls.execute = _sentry_execute # type: ignore def patch_redis_async_client(cls, is_cluster, set_db_data_fn): # type: (Union[type[StrictRedis[Any]], type[RedisCluster[Any]]], bool, Callable[[Span, Any], None]) -> None old_execute_command = cls.execute_command from sentry_sdk.integrations.redis import RedisIntegration async def _sentry_execute_command(self, name, *args, **kwargs): # type: (Any, str, *Any, **Any) -> Any integration = sentry_sdk.get_client().get_integration(RedisIntegration) if integration is None: return await old_execute_command(self, name, *args, **kwargs) cache_properties = _compile_cache_span_properties( name, args, kwargs, integration, ) cache_span = None if cache_properties["is_cache_key"] and cache_properties["op"] is not None: cache_span = sentry_sdk.start_span( op=cache_properties["op"], name=cache_properties["description"], origin=SPAN_ORIGIN, ) cache_span.__enter__() db_properties = _compile_db_span_properties(integration, name, args) db_span = sentry_sdk.start_span( op=db_properties["op"], name=db_properties["description"], origin=SPAN_ORIGIN, ) db_span.__enter__() set_db_data_fn(db_span, self) _set_client_data(db_span, is_cluster, name, *args) value = await old_execute_command(self, name, *args, **kwargs) db_span.__exit__(None, None, None) if cache_span: _set_cache_data(cache_span, self, cache_properties, value) cache_span.__exit__(None, None, None) return value cls.execute_command = 
_sentry_execute_command # type: ignore sentry-python-2.18.0/sentry_sdk/integrations/redis/_sync_common.py000066400000000000000000000067751471214654000255260ustar00rootroot00000000000000import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.integrations.redis.consts import SPAN_ORIGIN from sentry_sdk.integrations.redis.modules.caches import ( _compile_cache_span_properties, _set_cache_data, ) from sentry_sdk.integrations.redis.modules.queries import _compile_db_span_properties from sentry_sdk.integrations.redis.utils import ( _set_client_data, _set_pipeline_data, ) from sentry_sdk.tracing import Span from sentry_sdk.utils import capture_internal_exceptions from typing import TYPE_CHECKING if TYPE_CHECKING: from collections.abc import Callable from typing import Any def patch_redis_pipeline( pipeline_cls, is_cluster, get_command_args_fn, set_db_data_fn, ): # type: (Any, bool, Any, Callable[[Span, Any], None]) -> None old_execute = pipeline_cls.execute from sentry_sdk.integrations.redis import RedisIntegration def sentry_patched_execute(self, *args, **kwargs): # type: (Any, *Any, **Any) -> Any if sentry_sdk.get_client().get_integration(RedisIntegration) is None: return old_execute(self, *args, **kwargs) with sentry_sdk.start_span( op=OP.DB_REDIS, name="redis.pipeline.execute", origin=SPAN_ORIGIN, ) as span: with capture_internal_exceptions(): set_db_data_fn(span, self) _set_pipeline_data( span, is_cluster, get_command_args_fn, False if is_cluster else self.transaction, self.command_stack, ) return old_execute(self, *args, **kwargs) pipeline_cls.execute = sentry_patched_execute def patch_redis_client(cls, is_cluster, set_db_data_fn): # type: (Any, bool, Callable[[Span, Any], None]) -> None """ This function can be used to instrument custom redis client classes or subclasses. 
""" old_execute_command = cls.execute_command from sentry_sdk.integrations.redis import RedisIntegration def sentry_patched_execute_command(self, name, *args, **kwargs): # type: (Any, str, *Any, **Any) -> Any integration = sentry_sdk.get_client().get_integration(RedisIntegration) if integration is None: return old_execute_command(self, name, *args, **kwargs) cache_properties = _compile_cache_span_properties( name, args, kwargs, integration, ) cache_span = None if cache_properties["is_cache_key"] and cache_properties["op"] is not None: cache_span = sentry_sdk.start_span( op=cache_properties["op"], name=cache_properties["description"], origin=SPAN_ORIGIN, ) cache_span.__enter__() db_properties = _compile_db_span_properties(integration, name, args) db_span = sentry_sdk.start_span( op=db_properties["op"], name=db_properties["description"], origin=SPAN_ORIGIN, ) db_span.__enter__() set_db_data_fn(db_span, self) _set_client_data(db_span, is_cluster, name, *args) value = old_execute_command(self, name, *args, **kwargs) db_span.__exit__(None, None, None) if cache_span: _set_cache_data(cache_span, self, cache_properties, value) cache_span.__exit__(None, None, None) return value cls.execute_command = sentry_patched_execute_command sentry-python-2.18.0/sentry_sdk/integrations/redis/consts.py000066400000000000000000000007401471214654000243360ustar00rootroot00000000000000SPAN_ORIGIN = "auto.db.redis" _SINGLE_KEY_COMMANDS = frozenset( ["decr", "decrby", "get", "incr", "incrby", "pttl", "set", "setex", "setnx", "ttl"], ) _MULTI_KEY_COMMANDS = frozenset( [ "del", "touch", "unlink", "mget", ], ) _COMMANDS_INCLUDING_SENSITIVE_DATA = [ "auth", ] _MAX_NUM_ARGS = 10 # Trim argument lists to this many values _MAX_NUM_COMMANDS = 10 # Trim command lists to this many values _DEFAULT_MAX_DATA_SIZE = 1024 sentry-python-2.18.0/sentry_sdk/integrations/redis/modules/000077500000000000000000000000001471214654000241225ustar00rootroot00000000000000sentry-python-2.18.0/sentry_sdk/integrations/redis/modules/__init__.py000066400000000000000000000000001471214654000262210ustar00rootroot00000000000000sentry-python-2.18.0/sentry_sdk/integrations/redis/modules/caches.py000066400000000000000000000077371471214654000257400ustar00rootroot00000000000000""" Code used for the Caches module in Sentry """ from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.integrations.redis.utils import _get_safe_key, _key_as_string from sentry_sdk.utils import capture_internal_exceptions GET_COMMANDS = ("get", "mget") SET_COMMANDS = ("set", "setex") from typing import TYPE_CHECKING if TYPE_CHECKING: from sentry_sdk.integrations.redis import RedisIntegration from sentry_sdk.tracing import Span from typing import Any, Optional def _get_op(name): # type: (str) -> Optional[str] op = None if name.lower() in GET_COMMANDS: op = OP.CACHE_GET elif name.lower() in SET_COMMANDS: op = OP.CACHE_PUT return op def _compile_cache_span_properties(redis_command, args, kwargs, integration): # type: (str, tuple[Any, ...], dict[str, Any], RedisIntegration) -> dict[str, Any] key = _get_safe_key(redis_command, args, kwargs) key_as_string = _key_as_string(key) keys_as_string = key_as_string.split(", ") is_cache_key = False for prefix in integration.cache_prefixes: for kee in keys_as_string: if kee.startswith(prefix): is_cache_key = True break if is_cache_key: break value = None if redis_command.lower() in SET_COMMANDS: value = args[-1] properties = { "op": _get_op(redis_command), "description": _get_cache_span_description( redis_command, args, kwargs, integration ), 
"key": key, "key_as_string": key_as_string, "redis_command": redis_command.lower(), "is_cache_key": is_cache_key, "value": value, } return properties def _get_cache_span_description(redis_command, args, kwargs, integration): # type: (str, tuple[Any, ...], dict[str, Any], RedisIntegration) -> str description = _key_as_string(_get_safe_key(redis_command, args, kwargs)) data_should_be_truncated = ( integration.max_data_size and len(description) > integration.max_data_size ) if data_should_be_truncated: description = description[: integration.max_data_size - len("...")] + "..." return description def _set_cache_data(span, redis_client, properties, return_value): # type: (Span, Any, dict[str, Any], Optional[Any]) -> None with capture_internal_exceptions(): span.set_data(SPANDATA.CACHE_KEY, properties["key"]) if properties["redis_command"] in GET_COMMANDS: if return_value is not None: span.set_data(SPANDATA.CACHE_HIT, True) size = ( len(str(return_value).encode("utf-8")) if not isinstance(return_value, bytes) else len(return_value) ) span.set_data(SPANDATA.CACHE_ITEM_SIZE, size) else: span.set_data(SPANDATA.CACHE_HIT, False) elif properties["redis_command"] in SET_COMMANDS: if properties["value"] is not None: size = ( len(properties["value"].encode("utf-8")) if not isinstance(properties["value"], bytes) else len(properties["value"]) ) span.set_data(SPANDATA.CACHE_ITEM_SIZE, size) try: connection_params = redis_client.connection_pool.connection_kwargs except AttributeError: # If it is a cluster, there is no connection_pool attribute so we # need to get the default node from the cluster instance default_node = redis_client.get_default_node() connection_params = { "host": default_node.host, "port": default_node.port, } host = connection_params.get("host") if host is not None: span.set_data(SPANDATA.NETWORK_PEER_ADDRESS, host) port = connection_params.get("port") if port is not None: span.set_data(SPANDATA.NETWORK_PEER_PORT, port) sentry-python-2.18.0/sentry_sdk/integrations/redis/modules/queries.py000066400000000000000000000037631471214654000261620ustar00rootroot00000000000000""" Code used for the Queries module in Sentry """ from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.integrations.redis.utils import _get_safe_command from sentry_sdk.utils import capture_internal_exceptions from typing import TYPE_CHECKING if TYPE_CHECKING: from redis import Redis from sentry_sdk.integrations.redis import RedisIntegration from sentry_sdk.tracing import Span from typing import Any def _compile_db_span_properties(integration, redis_command, args): # type: (RedisIntegration, str, tuple[Any, ...]) -> dict[str, Any] description = _get_db_span_description(integration, redis_command, args) properties = { "op": OP.DB_REDIS, "description": description, } return properties def _get_db_span_description(integration, command_name, args): # type: (RedisIntegration, str, tuple[Any, ...]) -> str description = command_name with capture_internal_exceptions(): description = _get_safe_command(command_name, args) data_should_be_truncated = ( integration.max_data_size and len(description) > integration.max_data_size ) if data_should_be_truncated: description = description[: integration.max_data_size - len("...")] + "..." 
    return description


def _set_db_data_on_span(span, connection_params):
    # type: (Span, dict[str, Any]) -> None
    span.set_data(SPANDATA.DB_SYSTEM, "redis")

    db = connection_params.get("db")
    if db is not None:
        span.set_data(SPANDATA.DB_NAME, str(db))

    host = connection_params.get("host")
    if host is not None:
        span.set_data(SPANDATA.SERVER_ADDRESS, host)

    port = connection_params.get("port")
    if port is not None:
        span.set_data(SPANDATA.SERVER_PORT, port)


def _set_db_data(span, redis_instance):
    # type: (Span, Redis[Any]) -> None
    try:
        _set_db_data_on_span(span, redis_instance.connection_pool.connection_kwargs)
    except AttributeError:
        pass  # connection_kwargs may be missing in some cases

sentry-python-2.18.0/sentry_sdk/integrations/redis/rb.py

"""
Instrumentation for Redis Blaster (rb)

https://github.com/getsentry/rb
"""

from sentry_sdk.integrations.redis._sync_common import patch_redis_client
from sentry_sdk.integrations.redis.modules.queries import _set_db_data


def _patch_rb():
    # type: () -> None
    try:
        import rb.clients  # type: ignore
    except ImportError:
        pass
    else:
        patch_redis_client(
            rb.clients.FanoutClient,
            is_cluster=False,
            set_db_data_fn=_set_db_data,
        )
        patch_redis_client(
            rb.clients.MappingClient,
            is_cluster=False,
            set_db_data_fn=_set_db_data,
        )
        patch_redis_client(
            rb.clients.RoutingClient,
            is_cluster=False,
            set_db_data_fn=_set_db_data,
        )

sentry-python-2.18.0/sentry_sdk/integrations/redis/redis.py

"""
Instrumentation for Redis

https://github.com/redis/redis-py
"""

from sentry_sdk.integrations.redis._sync_common import (
    patch_redis_client,
    patch_redis_pipeline,
)
from sentry_sdk.integrations.redis.modules.queries import _set_db_data

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Any, Sequence


def _get_redis_command_args(command):
    # type: (Any) -> Sequence[Any]
    return command[0]


def _patch_redis(StrictRedis, client):  # noqa: N803
    # type: (Any, Any) -> None
    patch_redis_client(
        StrictRedis,
        is_cluster=False,
        set_db_data_fn=_set_db_data,
    )
    patch_redis_pipeline(
        client.Pipeline,
        is_cluster=False,
        get_command_args_fn=_get_redis_command_args,
        set_db_data_fn=_set_db_data,
    )
    try:
        strict_pipeline = client.StrictPipeline
    except AttributeError:
        pass
    else:
        patch_redis_pipeline(
            strict_pipeline,
            is_cluster=False,
            get_command_args_fn=_get_redis_command_args,
            set_db_data_fn=_set_db_data,
        )

    try:
        import redis.asyncio
    except ImportError:
        pass
    else:
        from sentry_sdk.integrations.redis._async_common import (
            patch_redis_async_client,
            patch_redis_async_pipeline,
        )

        patch_redis_async_client(
            redis.asyncio.client.StrictRedis,
            is_cluster=False,
            set_db_data_fn=_set_db_data,
        )
        patch_redis_async_pipeline(
            redis.asyncio.client.Pipeline,
            False,
            _get_redis_command_args,
            set_db_data_fn=_set_db_data,
        )

sentry-python-2.18.0/sentry_sdk/integrations/redis/redis_cluster.py

"""
Instrumentation for RedisCluster
This is part of the main redis-py client.

https://github.com/redis/redis-py/blob/master/redis/cluster.py
"""

from sentry_sdk.integrations.redis._sync_common import (
    patch_redis_client,
    patch_redis_pipeline,
)
from sentry_sdk.integrations.redis.modules.queries import _set_db_data_on_span
from sentry_sdk.integrations.redis.utils import _parse_rediscluster_command
from sentry_sdk.utils import capture_internal_exceptions

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Any

    from redis import RedisCluster
    from redis.asyncio.cluster import (
        RedisCluster as AsyncRedisCluster,
        ClusterPipeline as AsyncClusterPipeline,
    )
    from sentry_sdk.tracing import Span


def _set_async_cluster_db_data(span, async_redis_cluster_instance):
    # type: (Span, AsyncRedisCluster[Any]) -> None
    default_node = async_redis_cluster_instance.get_default_node()
    if default_node is not None and default_node.connection_kwargs is not None:
        _set_db_data_on_span(span, default_node.connection_kwargs)


def _set_async_cluster_pipeline_db_data(span, async_redis_cluster_pipeline_instance):
    # type: (Span, AsyncClusterPipeline[Any]) -> None
    with capture_internal_exceptions():
        _set_async_cluster_db_data(
            span,
            # the AsyncClusterPipeline has always had a `_client` attr but it is private so potentially problematic and mypy
            # does not recognize it - see https://github.com/redis/redis-py/blame/v5.0.0/redis/asyncio/cluster.py#L1386
            async_redis_cluster_pipeline_instance._client,  # type: ignore[attr-defined]
        )


def _set_cluster_db_data(span, redis_cluster_instance):
    # type: (Span, RedisCluster[Any]) -> None
    default_node = redis_cluster_instance.get_default_node()

    if default_node is not None:
        connection_params = {
            "host": default_node.host,
            "port": default_node.port,
        }
        _set_db_data_on_span(span, connection_params)


def _patch_redis_cluster():
    # type: () -> None
    """Patches the cluster module on redis SDK (as opposed to rediscluster library)"""
    try:
        from redis import RedisCluster, cluster
    except ImportError:
        pass
    else:
        patch_redis_client(
            RedisCluster,
            is_cluster=True,
            set_db_data_fn=_set_cluster_db_data,
        )
        patch_redis_pipeline(
            cluster.ClusterPipeline,
            is_cluster=True,
            get_command_args_fn=_parse_rediscluster_command,
            set_db_data_fn=_set_cluster_db_data,
        )

    try:
        from redis.asyncio import cluster as async_cluster
    except ImportError:
        pass
    else:
        from sentry_sdk.integrations.redis._async_common import (
            patch_redis_async_client,
            patch_redis_async_pipeline,
        )

        patch_redis_async_client(
            async_cluster.RedisCluster,
            is_cluster=True,
            set_db_data_fn=_set_async_cluster_db_data,
        )
        patch_redis_async_pipeline(
            async_cluster.ClusterPipeline,
            is_cluster=True,
            get_command_args_fn=_parse_rediscluster_command,
            set_db_data_fn=_set_async_cluster_pipeline_db_data,
        )

sentry-python-2.18.0/sentry_sdk/integrations/redis/redis_py_cluster_legacy.py

"""
Instrumentation for redis-py-cluster

The project redis-py-cluster is EOL and was integrated into redis-py
starting from version 4.1.0 (Dec 26, 2021).
https://github.com/grokzen/redis-py-cluster """ from sentry_sdk.integrations.redis._sync_common import ( patch_redis_client, patch_redis_pipeline, ) from sentry_sdk.integrations.redis.modules.queries import _set_db_data from sentry_sdk.integrations.redis.utils import _parse_rediscluster_command def _patch_rediscluster(): # type: () -> None try: import rediscluster # type: ignore except ImportError: return patch_redis_client( rediscluster.RedisCluster, is_cluster=True, set_db_data_fn=_set_db_data, ) # up to v1.3.6, __version__ attribute is a tuple # from v2.0.0, __version__ is a string and VERSION a tuple version = getattr(rediscluster, "VERSION", rediscluster.__version__) # StrictRedisCluster was introduced in v0.2.0 and removed in v2.0.0 # https://github.com/Grokzen/redis-py-cluster/blob/master/docs/release-notes.rst if (0, 2, 0) < version < (2, 0, 0): pipeline_cls = rediscluster.pipeline.StrictClusterPipeline patch_redis_client( rediscluster.StrictRedisCluster, is_cluster=True, set_db_data_fn=_set_db_data, ) else: pipeline_cls = rediscluster.pipeline.ClusterPipeline patch_redis_pipeline( pipeline_cls, is_cluster=True, get_command_args_fn=_parse_rediscluster_command, set_db_data_fn=_set_db_data, ) sentry-python-2.18.0/sentry_sdk/integrations/redis/utils.py000066400000000000000000000075641471214654000242000ustar00rootroot00000000000000from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.redis.consts import ( _COMMANDS_INCLUDING_SENSITIVE_DATA, _MAX_NUM_ARGS, _MAX_NUM_COMMANDS, _MULTI_KEY_COMMANDS, _SINGLE_KEY_COMMANDS, ) from sentry_sdk.scope import should_send_default_pii from sentry_sdk.utils import SENSITIVE_DATA_SUBSTITUTE from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any, Optional, Sequence from sentry_sdk.tracing import Span def _get_safe_command(name, args): # type: (str, Sequence[Any]) -> str command_parts = [name] for i, arg in enumerate(args): if i > _MAX_NUM_ARGS: break name_low = name.lower() if name_low in _COMMANDS_INCLUDING_SENSITIVE_DATA: command_parts.append(SENSITIVE_DATA_SUBSTITUTE) continue arg_is_the_key = i == 0 if arg_is_the_key: command_parts.append(repr(arg)) else: if should_send_default_pii(): command_parts.append(repr(arg)) else: command_parts.append(SENSITIVE_DATA_SUBSTITUTE) command = " ".join(command_parts) return command def _safe_decode(key): # type: (Any) -> str if isinstance(key, bytes): try: return key.decode() except UnicodeDecodeError: return "" return str(key) def _key_as_string(key): # type: (Any) -> str if isinstance(key, (dict, list, tuple)): key = ", ".join(_safe_decode(x) for x in key) elif isinstance(key, bytes): key = _safe_decode(key) elif key is None: key = "" else: key = str(key) return key def _get_safe_key(method_name, args, kwargs): # type: (str, Optional[tuple[Any, ...]], Optional[dict[str, Any]]) -> Optional[tuple[str, ...]] """ Gets the key (or keys) from the given method_name. 
The method_name could be a redis command or a django caching command """ key = None if args is not None and method_name.lower() in _MULTI_KEY_COMMANDS: # for example redis "mget" key = tuple(args) elif args is not None and len(args) >= 1: # for example django "set_many/get_many" or redis "get" if isinstance(args[0], (dict, list, tuple)): key = tuple(args[0]) else: key = (args[0],) elif kwargs is not None and "key" in kwargs: # this is a legacy case for older versions of Django if isinstance(kwargs["key"], (list, tuple)): if len(kwargs["key"]) > 0: key = tuple(kwargs["key"]) else: if kwargs["key"] is not None: key = (kwargs["key"],) return key def _parse_rediscluster_command(command): # type: (Any) -> Sequence[Any] return command.args def _set_pipeline_data( span, is_cluster, get_command_args_fn, is_transaction, command_stack ): # type: (Span, bool, Any, bool, Sequence[Any]) -> None span.set_tag("redis.is_cluster", is_cluster) span.set_tag("redis.transaction", is_transaction) commands = [] for i, arg in enumerate(command_stack): if i >= _MAX_NUM_COMMANDS: break command = get_command_args_fn(arg) commands.append(_get_safe_command(command[0], command[1:])) span.set_data( "redis.commands", { "count": len(command_stack), "first_ten": commands, }, ) def _set_client_data(span, is_cluster, name, *args): # type: (Span, bool, str, *Any) -> None span.set_tag("redis.is_cluster", is_cluster) if name: span.set_tag("redis.command", name) span.set_tag(SPANDATA.DB_OPERATION, name) if name and args: name_low = name.lower() if (name_low in _SINGLE_KEY_COMMANDS) or ( name_low in _MULTI_KEY_COMMANDS and len(args) == 1 ): span.set_tag("redis.key", args[0]) sentry-python-2.18.0/sentry_sdk/integrations/rq.py000066400000000000000000000123661471214654000223500ustar00rootroot00000000000000import weakref import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.api import continue_trace from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.tracing import TRANSACTION_SOURCE_TASK from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, event_from_exception, format_timestamp, parse_version, ) try: from rq.queue import Queue from rq.timeouts import JobTimeoutException from rq.version import VERSION as RQ_VERSION from rq.worker import Worker from rq.job import JobStatus except ImportError: raise DidNotEnable("RQ not installed") from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any, Callable from sentry_sdk._types import Event, EventProcessor from sentry_sdk.utils import ExcInfo from rq.job import Job class RqIntegration(Integration): identifier = "rq" origin = f"auto.queue.{identifier}" @staticmethod def setup_once(): # type: () -> None version = parse_version(RQ_VERSION) if version is None: raise DidNotEnable("Unparsable RQ version: {}".format(RQ_VERSION)) if version < (0, 6): raise DidNotEnable("RQ 0.6 or newer is required.") old_perform_job = Worker.perform_job @ensure_integration_enabled(RqIntegration, old_perform_job) def sentry_patched_perform_job(self, job, *args, **kwargs): # type: (Any, Job, *Queue, **Any) -> bool with sentry_sdk.new_scope() as scope: scope.clear_breadcrumbs() scope.add_event_processor(_make_event_processor(weakref.ref(job))) transaction = continue_trace( job.meta.get("_sentry_trace_headers") or {}, op=OP.QUEUE_TASK_RQ, name="unknown RQ task", source=TRANSACTION_SOURCE_TASK, origin=RqIntegration.origin, ) with capture_internal_exceptions(): transaction.name 
= job.func_name with sentry_sdk.start_transaction( transaction, custom_sampling_context={"rq_job": job}, ): rv = old_perform_job(self, job, *args, **kwargs) if self.is_horse: # We're inside of a forked process and RQ is # about to call `os._exit`. Make sure that our # events get sent out. sentry_sdk.get_client().flush() return rv Worker.perform_job = sentry_patched_perform_job old_handle_exception = Worker.handle_exception def sentry_patched_handle_exception(self, job, *exc_info, **kwargs): # type: (Worker, Any, *Any, **Any) -> Any # Note, the order of the `or` here is important, # because calling `job.is_failed` will change `_status`. if job._status == JobStatus.FAILED or job.is_failed: _capture_exception(exc_info) return old_handle_exception(self, job, *exc_info, **kwargs) Worker.handle_exception = sentry_patched_handle_exception old_enqueue_job = Queue.enqueue_job @ensure_integration_enabled(RqIntegration, old_enqueue_job) def sentry_patched_enqueue_job(self, job, **kwargs): # type: (Queue, Any, **Any) -> Any scope = sentry_sdk.get_current_scope() if scope.span is not None: job.meta["_sentry_trace_headers"] = dict( scope.iter_trace_propagation_headers() ) return old_enqueue_job(self, job, **kwargs) Queue.enqueue_job = sentry_patched_enqueue_job ignore_logger("rq.worker") def _make_event_processor(weak_job): # type: (Callable[[], Job]) -> EventProcessor def event_processor(event, hint): # type: (Event, dict[str, Any]) -> Event job = weak_job() if job is not None: with capture_internal_exceptions(): extra = event.setdefault("extra", {}) rq_job = { "job_id": job.id, "func": job.func_name, "args": job.args, "kwargs": job.kwargs, "description": job.description, } if job.enqueued_at: rq_job["enqueued_at"] = format_timestamp(job.enqueued_at) if job.started_at: rq_job["started_at"] = format_timestamp(job.started_at) extra["rq-job"] = rq_job if "exc_info" in hint: with capture_internal_exceptions(): if issubclass(hint["exc_info"][0], JobTimeoutException): event["fingerprint"] = ["rq", "JobTimeoutException", job.func_name] return event return event_processor def _capture_exception(exc_info, **kwargs): # type: (ExcInfo, **Any) -> None client = sentry_sdk.get_client() event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "rq", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) sentry-python-2.18.0/sentry_sdk/integrations/sanic.py000066400000000000000000000314551471214654000230230ustar00rootroot00000000000000import sys import weakref from inspect import isawaitable from urllib.parse import urlsplit import sentry_sdk from sentry_sdk import continue_trace from sentry_sdk.consts import OP from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations._wsgi_common import RequestExtractor, _filter_headers from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, TRANSACTION_SOURCE_URL from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, event_from_exception, HAS_REAL_CONTEXTVARS, CONTEXTVARS_ERROR_MESSAGE, parse_version, reraise, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from collections.abc import Container from typing import Any from typing import Callable from typing import Optional from typing import Union from typing import Dict from sanic.request import Request, RequestParameters from sanic.response import BaseHTTPResponse from sentry_sdk._types import Event, EventProcessor, ExcInfo, Hint from sanic.router 
import Route try: from sanic import Sanic, __version__ as SANIC_VERSION from sanic.exceptions import SanicException from sanic.router import Router from sanic.handlers import ErrorHandler except ImportError: raise DidNotEnable("Sanic not installed") old_error_handler_lookup = ErrorHandler.lookup old_handle_request = Sanic.handle_request old_router_get = Router.get try: # This method was introduced in Sanic v21.9 old_startup = Sanic._startup except AttributeError: pass class SanicIntegration(Integration): identifier = "sanic" origin = f"auto.http.{identifier}" version = None def __init__(self, unsampled_statuses=frozenset({404})): # type: (Optional[Container[int]]) -> None """ The unsampled_statuses parameter can be used to specify for which HTTP statuses the transactions should not be sent to Sentry. By default, transactions are sent for all HTTP statuses, except 404. Set unsampled_statuses to None to send transactions for all HTTP statuses, including 404. """ self._unsampled_statuses = unsampled_statuses or set() @staticmethod def setup_once(): # type: () -> None SanicIntegration.version = parse_version(SANIC_VERSION) if SanicIntegration.version is None: raise DidNotEnable("Unparsable Sanic version: {}".format(SANIC_VERSION)) if SanicIntegration.version < (0, 8): raise DidNotEnable("Sanic 0.8 or newer required.") if not HAS_REAL_CONTEXTVARS: # We better have contextvars or we're going to leak state between # requests. raise DidNotEnable( "The sanic integration for Sentry requires Python 3.7+ " " or the aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE ) if SANIC_VERSION.startswith("0.8."): # Sanic 0.8 and older creates a logger named "root" and puts a # stringified version of every exception in there (without exc_info), # which our error deduplication can't detect. # # We explicitly check the version here because it is a very # invasive step to ignore this logger and not necessary in newer # versions at all. # # https://github.com/huge-success/sanic/issues/1332 ignore_logger("root") if SanicIntegration.version < (21, 9): _setup_legacy_sanic() return _setup_sanic() class SanicRequestExtractor(RequestExtractor): def content_length(self): # type: () -> int if self.request.body is None: return 0 return len(self.request.body) def cookies(self): # type: () -> Dict[str, str] return dict(self.request.cookies) def raw_data(self): # type: () -> bytes return self.request.body def form(self): # type: () -> RequestParameters return self.request.form def is_json(self): # type: () -> bool raise NotImplementedError() def json(self): # type: () -> Optional[Any] return self.request.json def files(self): # type: () -> RequestParameters return self.request.files def size_of_file(self, file): # type: (Any) -> int return len(file.body or ()) def _setup_sanic(): # type: () -> None Sanic._startup = _startup ErrorHandler.lookup = _sentry_error_handler_lookup def _setup_legacy_sanic(): # type: () -> None Sanic.handle_request = _legacy_handle_request Router.get = _legacy_router_get ErrorHandler.lookup = _sentry_error_handler_lookup async def _startup(self): # type: (Sanic) -> None # This happens about as early in the lifecycle as possible, just after the # Request object is created. The body has not yet been consumed. self.signal("http.lifecycle.request")(_context_enter) # This happens after the handler is complete. In v21.9 this signal is not # dispatched when there is an exception. Therefore we need to close out # and call _context_exit from the custom exception handler as well. 
# See https://github.com/sanic-org/sanic/issues/2297 self.signal("http.lifecycle.response")(_context_exit) # This happens inside of request handling immediately after the route # has been identified by the router. self.signal("http.routing.after")(_set_transaction) # The above signals need to be declared before this can be called. await old_startup(self) async def _context_enter(request): # type: (Request) -> None request.ctx._sentry_do_integration = ( sentry_sdk.get_client().get_integration(SanicIntegration) is not None ) if not request.ctx._sentry_do_integration: return weak_request = weakref.ref(request) request.ctx._sentry_scope = sentry_sdk.isolation_scope() scope = request.ctx._sentry_scope.__enter__() scope.clear_breadcrumbs() scope.add_event_processor(_make_request_processor(weak_request)) transaction = continue_trace( dict(request.headers), op=OP.HTTP_SERVER, # Unless the request results in a 404 error, the name and source will get overwritten in _set_transaction name=request.path, source=TRANSACTION_SOURCE_URL, origin=SanicIntegration.origin, ) request.ctx._sentry_transaction = sentry_sdk.start_transaction( transaction ).__enter__() async def _context_exit(request, response=None): # type: (Request, Optional[BaseHTTPResponse]) -> None with capture_internal_exceptions(): if not request.ctx._sentry_do_integration: return integration = sentry_sdk.get_client().get_integration(SanicIntegration) response_status = None if response is None else response.status # This capture_internal_exceptions block has been intentionally nested here, so that in case an exception # happens while trying to end the transaction, we still attempt to exit the hub. with capture_internal_exceptions(): request.ctx._sentry_transaction.set_http_status(response_status) request.ctx._sentry_transaction.sampled &= ( isinstance(integration, SanicIntegration) and response_status not in integration._unsampled_statuses ) request.ctx._sentry_transaction.__exit__(None, None, None) request.ctx._sentry_scope.__exit__(None, None, None) async def _set_transaction(request, route, **_): # type: (Request, Route, **Any) -> None if request.ctx._sentry_do_integration: with capture_internal_exceptions(): scope = sentry_sdk.get_current_scope() route_name = route.name.replace(request.app.name, "").strip(".") scope.set_transaction_name(route_name, source=TRANSACTION_SOURCE_COMPONENT) def _sentry_error_handler_lookup(self, exception, *args, **kwargs): # type: (Any, Exception, *Any, **Any) -> Optional[object] _capture_exception(exception) old_error_handler = old_error_handler_lookup(self, exception, *args, **kwargs) if old_error_handler is None: return None if sentry_sdk.get_client().get_integration(SanicIntegration) is None: return old_error_handler async def sentry_wrapped_error_handler(request, exception): # type: (Request, Exception) -> Any try: response = old_error_handler(request, exception) if isawaitable(response): response = await response return response except Exception: # Report errors that occur in Sanic error handler. These # exceptions will not even show up in Sanic's # `sanic.exceptions` logger. 
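            # Note: reraise() below re-raises the original exception object
            # with its original traceback, so capturing an event here does
            # not change what Sanic's machinery ultimately observes.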
exc_info = sys.exc_info() _capture_exception(exc_info) reraise(*exc_info) finally: # As mentioned in previous comment in _startup, this can be removed # after https://github.com/sanic-org/sanic/issues/2297 is resolved if SanicIntegration.version and SanicIntegration.version == (21, 9): await _context_exit(request) return sentry_wrapped_error_handler async def _legacy_handle_request(self, request, *args, **kwargs): # type: (Any, Request, *Any, **Any) -> Any if sentry_sdk.get_client().get_integration(SanicIntegration) is None: return await old_handle_request(self, request, *args, **kwargs) weak_request = weakref.ref(request) with sentry_sdk.isolation_scope() as scope: scope.clear_breadcrumbs() scope.add_event_processor(_make_request_processor(weak_request)) response = old_handle_request(self, request, *args, **kwargs) if isawaitable(response): response = await response return response def _legacy_router_get(self, *args): # type: (Any, Union[Any, Request]) -> Any rv = old_router_get(self, *args) if sentry_sdk.get_client().get_integration(SanicIntegration) is not None: with capture_internal_exceptions(): scope = sentry_sdk.get_isolation_scope() if SanicIntegration.version and SanicIntegration.version >= (21, 3): # Sanic versions above and including 21.3 append the app name to the # route name, and so we need to remove it from Route name so the # transaction name is consistent across all versions sanic_app_name = self.ctx.app.name sanic_route = rv[0].name if sanic_route.startswith("%s." % sanic_app_name): # We add a 1 to the len of the sanic_app_name because there is a dot # that joins app name and the route name # Format: app_name.route_name sanic_route = sanic_route[len(sanic_app_name) + 1 :] scope.set_transaction_name( sanic_route, source=TRANSACTION_SOURCE_COMPONENT ) else: scope.set_transaction_name( rv[0].__name__, source=TRANSACTION_SOURCE_COMPONENT ) return rv @ensure_integration_enabled(SanicIntegration) def _capture_exception(exception): # type: (Union[ExcInfo, BaseException]) -> None with capture_internal_exceptions(): event, hint = event_from_exception( exception, client_options=sentry_sdk.get_client().options, mechanism={"type": "sanic", "handled": False}, ) if hint and hasattr(hint["exc_info"][0], "quiet") and hint["exc_info"][0].quiet: return sentry_sdk.capture_event(event, hint=hint) def _make_request_processor(weak_request): # type: (Callable[[], Request]) -> EventProcessor def sanic_processor(event, hint): # type: (Event, Optional[Hint]) -> Optional[Event] try: if hint and issubclass(hint["exc_info"][0], SanicException): return None except KeyError: pass request = weak_request() if request is None: return event with capture_internal_exceptions(): extractor = SanicRequestExtractor(request) extractor.extract_into_event(event) request_info = event["request"] urlparts = urlsplit(request.url) request_info["url"] = "%s://%s%s" % ( urlparts.scheme, urlparts.netloc, urlparts.path, ) request_info["query_string"] = urlparts.query request_info["method"] = request.method request_info["env"] = {"REMOTE_ADDR": request.remote_addr} request_info["headers"] = _filter_headers(dict(request.headers)) return event return sanic_processor sentry-python-2.18.0/sentry_sdk/integrations/serverless.py000066400000000000000000000034141471214654000241150ustar00rootroot00000000000000import sys from functools import wraps import sentry_sdk from sentry_sdk.utils import event_from_exception, reraise from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Callable from typing 
import TypeVar from typing import Union from typing import Optional from typing import overload F = TypeVar("F", bound=Callable[..., Any]) else: def overload(x): # type: (F) -> F return x @overload def serverless_function(f, flush=True): # type: (F, bool) -> F pass @overload def serverless_function(f=None, flush=True): # noqa: F811 # type: (None, bool) -> Callable[[F], F] pass def serverless_function(f=None, flush=True): # noqa # type: (Optional[F], bool) -> Union[F, Callable[[F], F]] def wrapper(f): # type: (F) -> F @wraps(f) def inner(*args, **kwargs): # type: (*Any, **Any) -> Any with sentry_sdk.isolation_scope() as scope: scope.clear_breadcrumbs() try: return f(*args, **kwargs) except Exception: _capture_and_reraise() finally: if flush: sentry_sdk.flush() return inner # type: ignore if f is None: return wrapper else: return wrapper(f) def _capture_and_reraise(): # type: () -> None exc_info = sys.exc_info() client = sentry_sdk.get_client() if client.is_active(): event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "serverless", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) reraise(*exc_info) sentry-python-2.18.0/sentry_sdk/integrations/socket.py000066400000000000000000000057341471214654000232170ustar00rootroot00000000000000import socket import sentry_sdk from sentry_sdk._types import MYPY from sentry_sdk.consts import OP from sentry_sdk.integrations import Integration if MYPY: from socket import AddressFamily, SocketKind from typing import Tuple, Optional, Union, List __all__ = ["SocketIntegration"] class SocketIntegration(Integration): identifier = "socket" origin = f"auto.socket.{identifier}" @staticmethod def setup_once(): # type: () -> None """ patches two of the most used functions of socket: create_connection and getaddrinfo(dns resolver) """ _patch_create_connection() _patch_getaddrinfo() def _get_span_description(host, port): # type: (Union[bytes, str, None], Union[str, int, None]) -> str try: host = host.decode() # type: ignore except (UnicodeDecodeError, AttributeError): pass description = "%s:%s" % (host, port) # type: ignore return description def _patch_create_connection(): # type: () -> None real_create_connection = socket.create_connection def create_connection( address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, # type: ignore source_address=None, ): # type: (Tuple[Optional[str], int], Optional[float], Optional[Tuple[Union[bytearray, bytes, str], int]])-> socket.socket integration = sentry_sdk.get_client().get_integration(SocketIntegration) if integration is None: return real_create_connection(address, timeout, source_address) with sentry_sdk.start_span( op=OP.SOCKET_CONNECTION, name=_get_span_description(address[0], address[1]), origin=SocketIntegration.origin, ) as span: span.set_data("address", address) span.set_data("timeout", timeout) span.set_data("source_address", source_address) return real_create_connection( address=address, timeout=timeout, source_address=source_address ) socket.create_connection = create_connection # type: ignore def _patch_getaddrinfo(): # type: () -> None real_getaddrinfo = socket.getaddrinfo def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): # type: (Union[bytes, str, None], Union[str, int, None], int, int, int, int) -> List[Tuple[AddressFamily, SocketKind, int, str, Union[Tuple[str, int], Tuple[str, int, int, int]]]] integration = sentry_sdk.get_client().get_integration(SocketIntegration) if integration is None: return real_getaddrinfo(host, port, family, type, proto, 
flags) with sentry_sdk.start_span( op=OP.SOCKET_DNS, name=_get_span_description(host, port), origin=SocketIntegration.origin, ) as span: span.set_data("host", host) span.set_data("port", port) return real_getaddrinfo(host, port, family, type, proto, flags) socket.getaddrinfo = getaddrinfo # type: ignore sentry-python-2.18.0/sentry_sdk/integrations/spark/000077500000000000000000000000001471214654000224645ustar00rootroot00000000000000sentry-python-2.18.0/sentry_sdk/integrations/spark/__init__.py000066400000000000000000000003201471214654000245700ustar00rootroot00000000000000from sentry_sdk.integrations.spark.spark_driver import SparkIntegration from sentry_sdk.integrations.spark.spark_worker import SparkWorkerIntegration __all__ = ["SparkIntegration", "SparkWorkerIntegration"] sentry-python-2.18.0/sentry_sdk/integrations/spark/spark_driver.py000066400000000000000000000202241471214654000255310ustar00rootroot00000000000000import sentry_sdk from sentry_sdk.integrations import Integration from sentry_sdk.utils import capture_internal_exceptions, ensure_integration_enabled from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Optional from sentry_sdk._types import Event, Hint class SparkIntegration(Integration): identifier = "spark" @staticmethod def setup_once(): # type: () -> None patch_spark_context_init() def _set_app_properties(): # type: () -> None """ Set properties in driver that propagate to worker processes, allowing for workers to have access to those properties. This allows worker integration to have access to app_name and application_id. """ from pyspark import SparkContext spark_context = SparkContext._active_spark_context if spark_context: spark_context.setLocalProperty("sentry_app_name", spark_context.appName) spark_context.setLocalProperty( "sentry_application_id", spark_context.applicationId ) def _start_sentry_listener(sc): # type: (Any) -> None """ Start java gateway server to add custom `SparkListener` """ from pyspark.java_gateway import ensure_callback_server_started gw = sc._gateway ensure_callback_server_started(gw) listener = SentryListener() sc._jsc.sc().addSparkListener(listener) def patch_spark_context_init(): # type: () -> None from pyspark import SparkContext spark_context_init = SparkContext._do_init @ensure_integration_enabled(SparkIntegration, spark_context_init) def _sentry_patched_spark_context_init(self, *args, **kwargs): # type: (SparkContext, *Any, **Any) -> Optional[Any] rv = spark_context_init(self, *args, **kwargs) _start_sentry_listener(self) _set_app_properties() scope = sentry_sdk.get_isolation_scope() @scope.add_event_processor def process_event(event, hint): # type: (Event, Hint) -> Optional[Event] with capture_internal_exceptions(): if sentry_sdk.get_client().get_integration(SparkIntegration) is None: return event if self._active_spark_context is None: return event event.setdefault("user", {}).setdefault("id", self.sparkUser()) event.setdefault("tags", {}).setdefault( "executor.id", self._conf.get("spark.executor.id") ) event["tags"].setdefault( "spark-submit.deployMode", self._conf.get("spark.submit.deployMode"), ) event["tags"].setdefault( "driver.host", self._conf.get("spark.driver.host") ) event["tags"].setdefault( "driver.port", self._conf.get("spark.driver.port") ) event["tags"].setdefault("spark_version", self.version) event["tags"].setdefault("app_name", self.appName) event["tags"].setdefault("application_id", self.applicationId) event["tags"].setdefault("master", self.master) 
event["tags"].setdefault("spark_home", self.sparkHome) event.setdefault("extra", {}).setdefault("web_url", self.uiWebUrl) return event return rv SparkContext._do_init = _sentry_patched_spark_context_init class SparkListener: def onApplicationEnd(self, applicationEnd): # noqa: N802,N803 # type: (Any) -> None pass def onApplicationStart(self, applicationStart): # noqa: N802,N803 # type: (Any) -> None pass def onBlockManagerAdded(self, blockManagerAdded): # noqa: N802,N803 # type: (Any) -> None pass def onBlockManagerRemoved(self, blockManagerRemoved): # noqa: N802,N803 # type: (Any) -> None pass def onBlockUpdated(self, blockUpdated): # noqa: N802,N803 # type: (Any) -> None pass def onEnvironmentUpdate(self, environmentUpdate): # noqa: N802,N803 # type: (Any) -> None pass def onExecutorAdded(self, executorAdded): # noqa: N802,N803 # type: (Any) -> None pass def onExecutorBlacklisted(self, executorBlacklisted): # noqa: N802,N803 # type: (Any) -> None pass def onExecutorBlacklistedForStage( # noqa: N802 self, executorBlacklistedForStage # noqa: N803 ): # type: (Any) -> None pass def onExecutorMetricsUpdate(self, executorMetricsUpdate): # noqa: N802,N803 # type: (Any) -> None pass def onExecutorRemoved(self, executorRemoved): # noqa: N802,N803 # type: (Any) -> None pass def onJobEnd(self, jobEnd): # noqa: N802,N803 # type: (Any) -> None pass def onJobStart(self, jobStart): # noqa: N802,N803 # type: (Any) -> None pass def onNodeBlacklisted(self, nodeBlacklisted): # noqa: N802,N803 # type: (Any) -> None pass def onNodeBlacklistedForStage(self, nodeBlacklistedForStage): # noqa: N802,N803 # type: (Any) -> None pass def onNodeUnblacklisted(self, nodeUnblacklisted): # noqa: N802,N803 # type: (Any) -> None pass def onOtherEvent(self, event): # noqa: N802,N803 # type: (Any) -> None pass def onSpeculativeTaskSubmitted(self, speculativeTask): # noqa: N802,N803 # type: (Any) -> None pass def onStageCompleted(self, stageCompleted): # noqa: N802,N803 # type: (Any) -> None pass def onStageSubmitted(self, stageSubmitted): # noqa: N802,N803 # type: (Any) -> None pass def onTaskEnd(self, taskEnd): # noqa: N802,N803 # type: (Any) -> None pass def onTaskGettingResult(self, taskGettingResult): # noqa: N802,N803 # type: (Any) -> None pass def onTaskStart(self, taskStart): # noqa: N802,N803 # type: (Any) -> None pass def onUnpersistRDD(self, unpersistRDD): # noqa: N802,N803 # type: (Any) -> None pass class Java: implements = ["org.apache.spark.scheduler.SparkListenerInterface"] class SentryListener(SparkListener): def onJobStart(self, jobStart): # noqa: N802,N803 # type: (Any) -> None message = "Job {} Started".format(jobStart.jobId()) sentry_sdk.add_breadcrumb(level="info", message=message) _set_app_properties() def onJobEnd(self, jobEnd): # noqa: N802,N803 # type: (Any) -> None level = "" message = "" data = {"result": jobEnd.jobResult().toString()} if jobEnd.jobResult().toString() == "JobSucceeded": level = "info" message = "Job {} Ended".format(jobEnd.jobId()) else: level = "warning" message = "Job {} Failed".format(jobEnd.jobId()) sentry_sdk.add_breadcrumb(level=level, message=message, data=data) def onStageSubmitted(self, stageSubmitted): # noqa: N802,N803 # type: (Any) -> None stage_info = stageSubmitted.stageInfo() message = "Stage {} Submitted".format(stage_info.stageId()) data = {"attemptId": stage_info.attemptId(), "name": stage_info.name()} sentry_sdk.add_breadcrumb(level="info", message=message, data=data) _set_app_properties() def onStageCompleted(self, stageCompleted): # noqa: N802,N803 # type: (Any) 
-> None from py4j.protocol import Py4JJavaError # type: ignore stage_info = stageCompleted.stageInfo() message = "" level = "" data = {"attemptId": stage_info.attemptId(), "name": stage_info.name()} # Have to Try Except because stageInfo.failureReason() is typed with Scala Option try: data["reason"] = stage_info.failureReason().get() message = "Stage {} Failed".format(stage_info.stageId()) level = "warning" except Py4JJavaError: message = "Stage {} Completed".format(stage_info.stageId()) level = "info" sentry_sdk.add_breadcrumb(level=level, message=message, data=data) sentry-python-2.18.0/sentry_sdk/integrations/spark/spark_worker.py000066400000000000000000000071721471214654000255560ustar00rootroot00000000000000import sys import sentry_sdk from sentry_sdk.integrations import Integration from sentry_sdk.utils import ( capture_internal_exceptions, exc_info_from_error, single_exception_from_error_tuple, walk_exception_chain, event_hint_with_exc_info, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Optional from sentry_sdk._types import ExcInfo, Event, Hint class SparkWorkerIntegration(Integration): identifier = "spark_worker" @staticmethod def setup_once(): # type: () -> None import pyspark.daemon as original_daemon original_daemon.worker_main = _sentry_worker_main def _capture_exception(exc_info): # type: (ExcInfo) -> None client = sentry_sdk.get_client() mechanism = {"type": "spark", "handled": False} exc_info = exc_info_from_error(exc_info) exc_type, exc_value, tb = exc_info rv = [] # On Exception worker will call sys.exit(-1), so we can ignore SystemExit and similar errors for exc_type, exc_value, tb in walk_exception_chain(exc_info): if exc_type not in (SystemExit, EOFError, ConnectionResetError): rv.append( single_exception_from_error_tuple( exc_type, exc_value, tb, client.options, mechanism ) ) if rv: rv.reverse() hint = event_hint_with_exc_info(exc_info) event = {"level": "error", "exception": {"values": rv}} # type: Event _tag_task_context() sentry_sdk.capture_event(event, hint=hint) def _tag_task_context(): # type: () -> None from pyspark.taskcontext import TaskContext scope = sentry_sdk.get_isolation_scope() @scope.add_event_processor def process_event(event, hint): # type: (Event, Hint) -> Optional[Event] with capture_internal_exceptions(): integration = sentry_sdk.get_client().get_integration( SparkWorkerIntegration ) task_context = TaskContext.get() if integration is None or task_context is None: return event event.setdefault("tags", {}).setdefault( "stageId", str(task_context.stageId()) ) event["tags"].setdefault("partitionId", str(task_context.partitionId())) event["tags"].setdefault("attemptNumber", str(task_context.attemptNumber())) event["tags"].setdefault("taskAttemptId", str(task_context.taskAttemptId())) if task_context._localProperties: if "sentry_app_name" in task_context._localProperties: event["tags"].setdefault( "app_name", task_context._localProperties["sentry_app_name"] ) event["tags"].setdefault( "application_id", task_context._localProperties["sentry_application_id"], ) if "callSite.short" in task_context._localProperties: event.setdefault("extra", {}).setdefault( "callSite", task_context._localProperties["callSite.short"] ) return event def _sentry_worker_main(*args, **kwargs): # type: (*Optional[Any], **Optional[Any]) -> None import pyspark.worker as original_worker try: original_worker.main(*args, **kwargs) except SystemExit: if sentry_sdk.get_client().get_integration(SparkWorkerIntegration) is not None: exc_info 
= sys.exc_info() with capture_internal_exceptions(): _capture_exception(exc_info) sentry-python-2.18.0/sentry_sdk/integrations/sqlalchemy.py000066400000000000000000000106721471214654000240660ustar00rootroot00000000000000from sentry_sdk.consts import SPANSTATUS, SPANDATA from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.tracing_utils import add_query_source, record_sql_queries from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, parse_version, ) try: from sqlalchemy.engine import Engine # type: ignore from sqlalchemy.event import listen # type: ignore from sqlalchemy import __version__ as SQLALCHEMY_VERSION # type: ignore except ImportError: raise DidNotEnable("SQLAlchemy not installed.") from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import ContextManager from typing import Optional from sentry_sdk.tracing import Span class SqlalchemyIntegration(Integration): identifier = "sqlalchemy" origin = f"auto.db.{identifier}" @staticmethod def setup_once(): # type: () -> None version = parse_version(SQLALCHEMY_VERSION) if version is None: raise DidNotEnable( "Unparsable SQLAlchemy version: {}".format(SQLALCHEMY_VERSION) ) if version < (1, 2): raise DidNotEnable("SQLAlchemy 1.2 or newer required.") listen(Engine, "before_cursor_execute", _before_cursor_execute) listen(Engine, "after_cursor_execute", _after_cursor_execute) listen(Engine, "handle_error", _handle_error) @ensure_integration_enabled(SqlalchemyIntegration) def _before_cursor_execute( conn, cursor, statement, parameters, context, executemany, *args ): # type: (Any, Any, Any, Any, Any, bool, *Any) -> None ctx_mgr = record_sql_queries( cursor, statement, parameters, paramstyle=context and context.dialect and context.dialect.paramstyle or None, executemany=executemany, span_origin=SqlalchemyIntegration.origin, ) context._sentry_sql_span_manager = ctx_mgr span = ctx_mgr.__enter__() if span is not None: _set_db_data(span, conn) context._sentry_sql_span = span @ensure_integration_enabled(SqlalchemyIntegration) def _after_cursor_execute(conn, cursor, statement, parameters, context, *args): # type: (Any, Any, Any, Any, Any, *Any) -> None ctx_mgr = getattr( context, "_sentry_sql_span_manager", None ) # type: Optional[ContextManager[Any]] if ctx_mgr is not None: context._sentry_sql_span_manager = None ctx_mgr.__exit__(None, None, None) span = getattr(context, "_sentry_sql_span", None) # type: Optional[Span] if span is not None: with capture_internal_exceptions(): add_query_source(span) def _handle_error(context, *args): # type: (Any, *Any) -> None execution_context = context.execution_context if execution_context is None: return span = getattr(execution_context, "_sentry_sql_span", None) # type: Optional[Span] if span is not None: span.set_status(SPANSTATUS.INTERNAL_ERROR) # _after_cursor_execute does not get called for crashing SQL stmts. Judging # from SQLAlchemy codebase it does seem like any error coming into this # handler is going to be fatal. 
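    # Exiting the stored context manager here finishes the span that
    # _before_cursor_execute opened, so crashing statements still produce a
    # closed (INTERNAL_ERROR) span instead of a leaked, never-ended one.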
ctx_mgr = getattr( execution_context, "_sentry_sql_span_manager", None ) # type: Optional[ContextManager[Any]] if ctx_mgr is not None: execution_context._sentry_sql_span_manager = None ctx_mgr.__exit__(None, None, None) # See: https://docs.sqlalchemy.org/en/20/dialects/index.html def _get_db_system(name): # type: (str) -> Optional[str] name = str(name) if "sqlite" in name: return "sqlite" if "postgres" in name: return "postgresql" if "mariadb" in name: return "mariadb" if "mysql" in name: return "mysql" if "oracle" in name: return "oracle" return None def _set_db_data(span, conn): # type: (Span, Any) -> None db_system = _get_db_system(conn.engine.name) if db_system is not None: span.set_data(SPANDATA.DB_SYSTEM, db_system) if conn.engine.url is None: return db_name = conn.engine.url.database if db_name is not None: span.set_data(SPANDATA.DB_NAME, db_name) server_address = conn.engine.url.host if server_address is not None: span.set_data(SPANDATA.SERVER_ADDRESS, server_address) server_port = conn.engine.url.port if server_port is not None: span.set_data(SPANDATA.SERVER_PORT, server_port) sentry-python-2.18.0/sentry_sdk/integrations/starlette.py000066400000000000000000000630561471214654000237330ustar00rootroot00000000000000import asyncio import functools import warnings from collections.abc import Set from copy import deepcopy import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.integrations import ( DidNotEnable, Integration, _DEFAULT_FAILED_REQUEST_STATUS_CODES, ) from sentry_sdk.integrations._wsgi_common import ( DEFAULT_HTTP_METHODS_TO_CAPTURE, HttpCodeRangeContainer, _is_json_content_type, request_body_within_bounds, ) from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import ( SOURCE_FOR_STYLE, TRANSACTION_SOURCE_COMPONENT, TRANSACTION_SOURCE_ROUTE, ) from sentry_sdk.utils import ( AnnotatedValue, capture_internal_exceptions, ensure_integration_enabled, event_from_exception, logger, parse_version, transaction_from_function, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any, Awaitable, Callable, Container, Dict, Optional, Tuple, Union from sentry_sdk._types import Event, HttpStatusCodeRange try: import starlette # type: ignore from starlette import __version__ as STARLETTE_VERSION from starlette.applications import Starlette # type: ignore from starlette.datastructures import UploadFile # type: ignore from starlette.middleware import Middleware # type: ignore from starlette.middleware.authentication import ( # type: ignore AuthenticationMiddleware, ) from starlette.requests import Request # type: ignore from starlette.routing import Match # type: ignore from starlette.types import ASGIApp, Receive, Scope as StarletteScope, Send # type: ignore except ImportError: raise DidNotEnable("Starlette is not installed") try: # Starlette 0.20 from starlette.middleware.exceptions import ExceptionMiddleware # type: ignore except ImportError: # Starlette 0.19.1 from starlette.exceptions import ExceptionMiddleware # type: ignore try: # Optional dependency of Starlette to parse form data.
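# The nested try/except below accounts for the package's import-name change: # python-multipart exposes "python_multipart" from 0.0.13 on and plain "multipart" before that.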
try: # python-multipart 0.0.13 and later import python_multipart as multipart # type: ignore except ImportError: # python-multipart 0.0.12 and earlier import multipart # type: ignore except ImportError: multipart = None _DEFAULT_TRANSACTION_NAME = "generic Starlette request" TRANSACTION_STYLE_VALUES = ("endpoint", "url") class StarletteIntegration(Integration): identifier = "starlette" origin = f"auto.http.{identifier}" transaction_style = "" def __init__( self, transaction_style="url", # type: str failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES, # type: Union[Set[int], list[HttpStatusCodeRange], None] middleware_spans=True, # type: bool http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE, # type: tuple[str, ...] ): # type: (...) -> None if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" % (transaction_style, TRANSACTION_STYLE_VALUES) ) self.transaction_style = transaction_style self.middleware_spans = middleware_spans self.http_methods_to_capture = tuple(map(str.upper, http_methods_to_capture)) if isinstance(failed_request_status_codes, Set): self.failed_request_status_codes = ( failed_request_status_codes ) # type: Container[int] else: warnings.warn( "Passing a list or None for failed_request_status_codes is deprecated. " "Please pass a set of int instead.", DeprecationWarning, stacklevel=2, ) if failed_request_status_codes is None: self.failed_request_status_codes = _DEFAULT_FAILED_REQUEST_STATUS_CODES else: self.failed_request_status_codes = HttpCodeRangeContainer( failed_request_status_codes ) @staticmethod def setup_once(): # type: () -> None version = parse_version(STARLETTE_VERSION) if version is None: raise DidNotEnable( "Unparsable Starlette version: {}".format(STARLETTE_VERSION) ) patch_middlewares() patch_asgi_app() patch_request_response() if version >= (0, 24): patch_templates() def _enable_span_for_middleware(middleware_class): # type: (Any) -> type old_call = middleware_class.__call__ async def _create_span_call(app, scope, receive, send, **kwargs): # type: (Any, Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]], Any) -> None integration = sentry_sdk.get_client().get_integration(StarletteIntegration) if integration is None or not integration.middleware_spans: return await old_call(app, scope, receive, send, **kwargs) middleware_name = app.__class__.__name__ # Update transaction name with middleware name name, source = _get_transaction_from_middleware(app, scope, integration) if name is not None: sentry_sdk.get_current_scope().set_transaction_name( name, source=source, ) with sentry_sdk.start_span( op=OP.MIDDLEWARE_STARLETTE, name=middleware_name, origin=StarletteIntegration.origin, ) as middleware_span: middleware_span.set_tag("starlette.middleware_name", middleware_name) # Creating spans for the "receive" callback async def _sentry_receive(*args, **kwargs): # type: (*Any, **Any) -> Any with sentry_sdk.start_span( op=OP.MIDDLEWARE_STARLETTE_RECEIVE, name=getattr(receive, "__qualname__", str(receive)), origin=StarletteIntegration.origin, ) as span: span.set_tag("starlette.middleware_name", middleware_name) return await receive(*args, **kwargs) receive_name = getattr(receive, "__name__", str(receive)) receive_patched = receive_name == "_sentry_receive" new_receive = _sentry_receive if not receive_patched else receive # Creating spans for the "send" callback async def _sentry_send(*args, **kwargs): # type: (*Any, **Any) -> Any 
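# Wrap the ASGI "send" callable so every outgoing message is recorded as its own span, # tagged with the middleware that produced it.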
with sentry_sdk.start_span( op=OP.MIDDLEWARE_STARLETTE_SEND, name=getattr(send, "__qualname__", str(send)), origin=StarletteIntegration.origin, ) as span: span.set_tag("starlette.middleware_name", middleware_name) return await send(*args, **kwargs) send_name = getattr(send, "__name__", str(send)) send_patched = send_name == "_sentry_send" new_send = _sentry_send if not send_patched else send return await old_call(app, scope, new_receive, new_send, **kwargs) not_yet_patched = old_call.__name__ not in [ "_create_span_call", "_sentry_authenticationmiddleware_call", "_sentry_exceptionmiddleware_call", ] if not_yet_patched: middleware_class.__call__ = _create_span_call return middleware_class @ensure_integration_enabled(StarletteIntegration) def _capture_exception(exception, handled=False): # type: (BaseException, bool) -> None event, hint = event_from_exception( exception, client_options=sentry_sdk.get_client().options, mechanism={"type": StarletteIntegration.identifier, "handled": handled}, ) sentry_sdk.capture_event(event, hint=hint) def patch_exception_middleware(middleware_class): # type: (Any) -> None """ Capture all exceptions in Starlette app and also extract user information. """ old_middleware_init = middleware_class.__init__ not_yet_patched = "_sentry_middleware_init" not in str(old_middleware_init) if not_yet_patched: def _sentry_middleware_init(self, *args, **kwargs): # type: (Any, Any, Any) -> None old_middleware_init(self, *args, **kwargs) # Patch existing exception handlers old_handlers = self._exception_handlers.copy() async def _sentry_patched_exception_handler(self, *args, **kwargs): # type: (Any, Any, Any) -> None integration = sentry_sdk.get_client().get_integration( StarletteIntegration ) exp = args[0] if integration is not None: is_http_server_error = ( hasattr(exp, "status_code") and isinstance(exp.status_code, int) and exp.status_code in integration.failed_request_status_codes ) if is_http_server_error: _capture_exception(exp, handled=True) # Find a matching handler old_handler = None for cls in type(exp).__mro__: if cls in old_handlers: old_handler = old_handlers[cls] break if old_handler is None: return if _is_async_callable(old_handler): return await old_handler(self, *args, **kwargs) else: return old_handler(self, *args, **kwargs) for key in self._exception_handlers.keys(): self._exception_handlers[key] = _sentry_patched_exception_handler middleware_class.__init__ = _sentry_middleware_init old_call = middleware_class.__call__ async def _sentry_exceptionmiddleware_call(self, scope, receive, send): # type: (Dict[str, Any], Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None # Also add the user (that was eventually set by the Authentication middleware # that was called before this middleware). This is done because the authentication # middleware sets the user in the scope and then (in the same function) # calls this exception middleware. In case there is no exception (or no handler # for the type of exception occurring) then the exception bubbles up and setting the # user information into the sentry scope is done in the auth middleware and the # ASGI middleware will then send everything to Sentry and this is fine. # But if there is an exception happening that the exception middleware here # has a handler for, it will send the exception directly to Sentry, so we need # the user information right now. # This is why we do it here.
_add_user_to_sentry_scope(scope) await old_call(self, scope, receive, send) middleware_class.__call__ = _sentry_exceptionmiddleware_call @ensure_integration_enabled(StarletteIntegration) def _add_user_to_sentry_scope(scope): # type: (Dict[str, Any]) -> None """ Extracts user information from the ASGI scope and adds it to Sentry's scope. """ if "user" not in scope: return if not should_send_default_pii(): return user_info = {} # type: Dict[str, Any] starlette_user = scope["user"] username = getattr(starlette_user, "username", None) if username: user_info.setdefault("username", starlette_user.username) user_id = getattr(starlette_user, "id", None) if user_id: user_info.setdefault("id", starlette_user.id) email = getattr(starlette_user, "email", None) if email: user_info.setdefault("email", starlette_user.email) sentry_scope = sentry_sdk.get_isolation_scope() sentry_scope.user = user_info def patch_authentication_middleware(middleware_class): # type: (Any) -> None """ Add user information to Sentry scope. """ old_call = middleware_class.__call__ not_yet_patched = "_sentry_authenticationmiddleware_call" not in str(old_call) if not_yet_patched: async def _sentry_authenticationmiddleware_call(self, scope, receive, send): # type: (Dict[str, Any], Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None await old_call(self, scope, receive, send) _add_user_to_sentry_scope(scope) middleware_class.__call__ = _sentry_authenticationmiddleware_call def patch_middlewares(): # type: () -> None """ Patches Starlette's `Middleware` class to record spans for every middleware invoked. """ old_middleware_init = Middleware.__init__ not_yet_patched = "_sentry_middleware_init" not in str(old_middleware_init) if not_yet_patched: def _sentry_middleware_init(self, cls, **options): # type: (Any, Any, Any) -> None if cls == SentryAsgiMiddleware: return old_middleware_init(self, cls, **options) span_enabled_cls = _enable_span_for_middleware(cls) old_middleware_init(self, span_enabled_cls, **options) if cls == AuthenticationMiddleware: patch_authentication_middleware(cls) if cls == ExceptionMiddleware: patch_exception_middleware(cls) Middleware.__init__ = _sentry_middleware_init def patch_asgi_app(): # type: () -> None """ Instrument Starlette ASGI app using the SentryAsgiMiddleware.
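A rough sketch of the effect (DSN and routes are illustrative placeholders) -- once the integration is set up, no manual wrapping of the app is needed:

    import sentry_sdk
    from sentry_sdk.integrations.starlette import StarletteIntegration

    sentry_sdk.init(
        dsn="https://<key>@<org>.ingest.sentry.io/<project>",
        integrations=[StarletteIntegration()],
    )
    app = Starlette(routes=routes)  # Starlette.__call__ is now wrapped automatically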
""" old_app = Starlette.__call__ async def _sentry_patched_asgi_app(self, scope, receive, send): # type: (Starlette, StarletteScope, Receive, Send) -> None integration = sentry_sdk.get_client().get_integration(StarletteIntegration) if integration is None: return await old_app(self, scope, receive, send) middleware = SentryAsgiMiddleware( lambda *a, **kw: old_app(self, *a, **kw), mechanism_type=StarletteIntegration.identifier, transaction_style=integration.transaction_style, span_origin=StarletteIntegration.origin, http_methods_to_capture=( integration.http_methods_to_capture if integration else DEFAULT_HTTP_METHODS_TO_CAPTURE ), ) middleware.__call__ = middleware._run_asgi3 return await middleware(scope, receive, send) Starlette.__call__ = _sentry_patched_asgi_app # This was vendored in from Starlette to support Starlette 0.19.1 because # this function was only introduced in 0.20.x def _is_async_callable(obj): # type: (Any) -> bool while isinstance(obj, functools.partial): obj = obj.func return asyncio.iscoroutinefunction(obj) or ( callable(obj) and asyncio.iscoroutinefunction(obj.__call__) ) def patch_request_response(): # type: () -> None old_request_response = starlette.routing.request_response def _sentry_request_response(func): # type: (Callable[[Any], Any]) -> ASGIApp old_func = func is_coroutine = _is_async_callable(old_func) if is_coroutine: async def _sentry_async_func(*args, **kwargs): # type: (*Any, **Any) -> Any integration = sentry_sdk.get_client().get_integration( StarletteIntegration ) if integration is None: return await old_func(*args, **kwargs) request = args[0] _set_transaction_name_and_source( sentry_sdk.get_current_scope(), integration.transaction_style, request, ) sentry_scope = sentry_sdk.get_isolation_scope() extractor = StarletteRequestExtractor(request) info = await extractor.extract_request_info() def _make_request_event_processor(req, integration): # type: (Any, Any) -> Callable[[Event, dict[str, Any]], Event] def event_processor(event, hint): # type: (Event, Dict[str, Any]) -> Event # Add info from request to event request_info = event.get("request", {}) if info: if "cookies" in info: request_info["cookies"] = info["cookies"] if "data" in info: request_info["data"] = info["data"] event["request"] = deepcopy(request_info) return event return event_processor sentry_scope._name = StarletteIntegration.identifier sentry_scope.add_event_processor( _make_request_event_processor(request, integration) ) return await old_func(*args, **kwargs) func = _sentry_async_func else: @functools.wraps(old_func) def _sentry_sync_func(*args, **kwargs): # type: (*Any, **Any) -> Any integration = sentry_sdk.get_client().get_integration( StarletteIntegration ) if integration is None: return old_func(*args, **kwargs) current_scope = sentry_sdk.get_current_scope() if current_scope.transaction is not None: current_scope.transaction.update_active_thread() sentry_scope = sentry_sdk.get_isolation_scope() if sentry_scope.profile is not None: sentry_scope.profile.update_active_thread_id() request = args[0] _set_transaction_name_and_source( sentry_scope, integration.transaction_style, request ) extractor = StarletteRequestExtractor(request) cookies = extractor.extract_cookies_from_request() def _make_request_event_processor(req, integration): # type: (Any, Any) -> Callable[[Event, dict[str, Any]], Event] def event_processor(event, hint): # type: (Event, dict[str, Any]) -> Event # Extract information from request request_info = event.get("request", {}) if cookies: request_info["cookies"] = 
cookies event["request"] = deepcopy(request_info) return event return event_processor sentry_scope._name = StarletteIntegration.identifier sentry_scope.add_event_processor( _make_request_event_processor(request, integration) ) return old_func(*args, **kwargs) func = _sentry_sync_func return old_request_response(func) starlette.routing.request_response = _sentry_request_response def patch_templates(): # type: () -> None # If markupsafe is not installed, then Jinja2 is not installed # (markupsafe is a dependency of Jinja2) # In this case we do not need to patch the Jinja2Templates class try: from markupsafe import Markup except ImportError: return # Nothing to do from starlette.templating import Jinja2Templates # type: ignore old_jinja2templates_init = Jinja2Templates.__init__ not_yet_patched = "_sentry_jinja2templates_init" not in str( old_jinja2templates_init ) if not_yet_patched: def _sentry_jinja2templates_init(self, *args, **kwargs): # type: (Jinja2Templates, *Any, **Any) -> None def add_sentry_trace_meta(request): # type: (Request) -> Dict[str, Any] trace_meta = Markup( sentry_sdk.get_current_scope().trace_propagation_meta() ) return { "sentry_trace_meta": trace_meta, } kwargs.setdefault("context_processors", []) if add_sentry_trace_meta not in kwargs["context_processors"]: kwargs["context_processors"].append(add_sentry_trace_meta) return old_jinja2templates_init(self, *args, **kwargs) Jinja2Templates.__init__ = _sentry_jinja2templates_init class StarletteRequestExtractor: """ Extracts useful information from the Starlette request (like form data or cookies) and adds it to the Sentry event. """ request = None # type: Request def __init__(self, request): # type: (StarletteRequestExtractor, Request) -> None self.request = request def extract_cookies_from_request(self): # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]] cookies = None # type: Optional[Dict[str, Any]] if should_send_default_pii(): cookies = self.cookies() return cookies async def extract_request_info(self): # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]] client = sentry_sdk.get_client() request_info = {} # type: Dict[str, Any] with capture_internal_exceptions(): # Add cookies if should_send_default_pii(): request_info["cookies"] = self.cookies() # If there is no body, just return the cookies content_length = await self.content_length() if not content_length: return request_info # Add annotation if body is too big if content_length and not request_body_within_bounds( client, content_length ): request_info["data"] = AnnotatedValue.removed_because_over_size_limit() return request_info # Add JSON body, if it is a JSON request json = await self.json() if json: request_info["data"] = json return request_info # Add form as key/value pairs, if request has form data form = await self.form() if form: form_data = {} for key, val in form.items(): is_file = isinstance(val, UploadFile) form_data[key] = ( val if not is_file else AnnotatedValue.removed_because_raw_data() ) request_info["data"] = form_data return request_info # Raw data, do not add body just an annotation request_info["data"] = AnnotatedValue.removed_because_raw_data() return request_info async def content_length(self): # type: (StarletteRequestExtractor) -> Optional[int] if "content-length" in self.request.headers: return int(self.request.headers["content-length"]) return None def cookies(self): # type: (StarletteRequestExtractor) -> Dict[str, Any] return self.request.cookies async def form(self): # type: (StarletteRequestExtractor) -> Any if 
multipart is None: return None # Parse the body first to get it cached, as Starlette does not cache form() as it # does with body() and json() https://github.com/encode/starlette/discussions/1933 # Calling `.form()` without calling `.body()` first will # potentially break the user's project. await self.request.body() return await self.request.form() def is_json(self): # type: (StarletteRequestExtractor) -> bool return _is_json_content_type(self.request.headers.get("content-type")) async def json(self): # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]] if not self.is_json(): return None return await self.request.json() def _transaction_name_from_router(scope): # type: (StarletteScope) -> Optional[str] router = scope.get("router") if not router: return None for route in router.routes: match = route.matches(scope) if match[0] == Match.FULL: return route.path return None def _set_transaction_name_and_source(scope, transaction_style, request): # type: (sentry_sdk.Scope, str, Any) -> None name = None source = SOURCE_FOR_STYLE[transaction_style] if transaction_style == "endpoint": endpoint = request.scope.get("endpoint") if endpoint: name = transaction_from_function(endpoint) or None elif transaction_style == "url": name = _transaction_name_from_router(request.scope) if name is None: name = _DEFAULT_TRANSACTION_NAME source = TRANSACTION_SOURCE_ROUTE scope.set_transaction_name(name, source=source) logger.debug( "[Starlette] Set transaction name and source on scope: %s / %s", name, source ) def _get_transaction_from_middleware(app, asgi_scope, integration): # type: (Any, Dict[str, Any], StarletteIntegration) -> Tuple[Optional[str], Optional[str]] name = None source = None if integration.transaction_style == "endpoint": name = transaction_from_function(app.__class__) source = TRANSACTION_SOURCE_COMPONENT elif integration.transaction_style == "url": name = _transaction_name_from_router(asgi_scope) source = TRANSACTION_SOURCE_ROUTE return name, source sentry-python-2.18.0/sentry_sdk/integrations/starlite.py000066400000000000000000000246041471214654000235530ustar00rootroot00000000000000import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE from sentry_sdk.utils import ( ensure_integration_enabled, event_from_exception, transaction_from_function, ) try: from starlite import Request, Starlite, State # type: ignore from starlite.handlers.base import BaseRouteHandler # type: ignore from starlite.middleware import DefineMiddleware # type: ignore from starlite.plugins.base import get_plugin_for_value # type: ignore from starlite.routes.http import HTTPRoute # type: ignore from starlite.utils import ConnectionDataExtractor, is_async_callable, Ref # type: ignore from pydantic import BaseModel # type: ignore except ImportError: raise DidNotEnable("Starlite is not installed") from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any, Optional, Union from starlite.types import ( # type: ignore ASGIApp, Hint, HTTPReceiveMessage, HTTPScope, Message, Middleware, Receive, Scope as StarliteScope, Send, WebSocketReceiveMessage, ) from starlite import MiddlewareProtocol from sentry_sdk._types import Event _DEFAULT_TRANSACTION_NAME = "generic Starlite request" class StarliteIntegration(Integration): identifier = "starlite" origin = f"auto.http.{identifier}" @staticmethod def setup_once(): # type: () -> None patch_app_init() patch_middlewares() patch_http_route_handle() class SentryStarliteASGIMiddleware(SentryAsgiMiddleware): def __init__(self, app, span_origin=StarliteIntegration.origin): # type: (ASGIApp, str) -> None super().__init__( app=app, unsafe_context_data=False, transaction_style="endpoint", mechanism_type="asgi", span_origin=span_origin, ) def patch_app_init(): # type: () -> None """ Replaces the Starlite class's `__init__` function in order to inject `after_exception` handlers and set the `SentryStarliteASGIMiddleware` as the outermost middleware in the stack. See: - https://starlite-api.github.io/starlite/usage/0-the-starlite-app/5-application-hooks/#after-exception - https://starlite-api.github.io/starlite/usage/7-middleware/0-middleware-intro/ """ old__init__ = Starlite.__init__ @ensure_integration_enabled(StarliteIntegration, old__init__) def injection_wrapper(self, *args, **kwargs): # type: (Starlite, *Any, **Any) -> None after_exception = kwargs.pop("after_exception", []) kwargs.update( after_exception=[ exception_handler, *( after_exception if isinstance(after_exception, list) else [after_exception] ), ] ) SentryStarliteASGIMiddleware.__call__ = SentryStarliteASGIMiddleware._run_asgi3 # type: ignore middleware = kwargs.get("middleware") or [] kwargs["middleware"] = [SentryStarliteASGIMiddleware, *middleware] old__init__(self, *args, **kwargs) Starlite.__init__ = injection_wrapper def patch_middlewares(): # type: () -> None old_resolve_middleware_stack = BaseRouteHandler.resolve_middleware @ensure_integration_enabled(StarliteIntegration, old_resolve_middleware_stack) def resolve_middleware_wrapper(self): # type: (BaseRouteHandler) -> list[Middleware] return [ enable_span_for_middleware(middleware) for middleware in old_resolve_middleware_stack(self) ] BaseRouteHandler.resolve_middleware = resolve_middleware_wrapper def enable_span_for_middleware(middleware): # type: (Middleware) -> Middleware if ( not hasattr(middleware, "__call__") # noqa: B004 or middleware is SentryStarliteASGIMiddleware ): return middleware if isinstance(middleware, DefineMiddleware): old_call = middleware.middleware.__call__ # type: ASGIApp else: old_call = middleware.__call__ async def _create_span_call(self, scope, receive, send): # type: (MiddlewareProtocol, StarliteScope, Receive, Send) -> None if sentry_sdk.get_client().get_integration(StarliteIntegration) is None: return await old_call(self, scope, receive, send) middleware_name = self.__class__.__name__ with sentry_sdk.start_span( op=OP.MIDDLEWARE_STARLITE, name=middleware_name, origin=StarliteIntegration.origin, ) as middleware_span: middleware_span.set_tag("starlite.middleware_name", middleware_name) # Creating spans for the "receive" callback async def _sentry_receive(*args, **kwargs): # type: (*Any, **Any) -> Union[HTTPReceiveMessage, WebSocketReceiveMessage] if sentry_sdk.get_client().get_integration(StarliteIntegration) is None: return await receive(*args, **kwargs) with sentry_sdk.start_span( op=OP.MIDDLEWARE_STARLITE_RECEIVE, name=getattr(receive, "__qualname__", str(receive)), origin=StarliteIntegration.origin, ) as span: span.set_tag("starlite.middleware_name", middleware_name) return await receive(*args, **kwargs) receive_name = getattr(receive, "__name__", str(receive)) receive_patched = receive_name == "_sentry_receive" new_receive = _sentry_receive if not receive_patched else receive # Creating spans for the "send" callback async def _sentry_send(message): #
type: (Message) -> None if sentry_sdk.get_client().get_integration(StarliteIntegration) is None: return await send(message) with sentry_sdk.start_span( op=OP.MIDDLEWARE_STARLITE_SEND, name=getattr(send, "__qualname__", str(send)), origin=StarliteIntegration.origin, ) as span: span.set_tag("starlite.middleware_name", middleware_name) return await send(message) send_name = getattr(send, "__name__", str(send)) send_patched = send_name == "_sentry_send" new_send = _sentry_send if not send_patched else send return await old_call(self, scope, new_receive, new_send) not_yet_patched = old_call.__name__ not in ["_create_span_call"] if not_yet_patched: if isinstance(middleware, DefineMiddleware): middleware.middleware.__call__ = _create_span_call else: middleware.__call__ = _create_span_call return middleware def patch_http_route_handle(): # type: () -> None old_handle = HTTPRoute.handle async def handle_wrapper(self, scope, receive, send): # type: (HTTPRoute, HTTPScope, Receive, Send) -> None if sentry_sdk.get_client().get_integration(StarliteIntegration) is None: return await old_handle(self, scope, receive, send) sentry_scope = sentry_sdk.get_isolation_scope() request = scope["app"].request_class( scope=scope, receive=receive, send=send ) # type: Request[Any, Any] extracted_request_data = ConnectionDataExtractor( parse_body=True, parse_query=True )(request) body = extracted_request_data.pop("body") request_data = await body def event_processor(event, _): # type: (Event, Hint) -> Event route_handler = scope.get("route_handler") request_info = event.get("request", {}) request_info["content_length"] = len(scope.get("_body", b"")) if should_send_default_pii(): request_info["cookies"] = extracted_request_data["cookies"] if request_data is not None: request_info["data"] = request_data func = None if route_handler.name is not None: tx_name = route_handler.name elif isinstance(route_handler.fn, Ref): func = route_handler.fn.value else: func = route_handler.fn if func is not None: tx_name = transaction_from_function(func) tx_info = {"source": SOURCE_FOR_STYLE["endpoint"]} if not tx_name: tx_name = _DEFAULT_TRANSACTION_NAME tx_info = {"source": TRANSACTION_SOURCE_ROUTE} event.update( { "request": request_info, "transaction": tx_name, "transaction_info": tx_info, } ) return event sentry_scope._name = StarliteIntegration.identifier sentry_scope.add_event_processor(event_processor) return await old_handle(self, scope, receive, send) HTTPRoute.handle = handle_wrapper def retrieve_user_from_scope(scope): # type: (StarliteScope) -> Optional[dict[str, Any]] scope_user = scope.get("user") if not scope_user: return None if isinstance(scope_user, dict): return scope_user if isinstance(scope_user, BaseModel): return scope_user.dict() if hasattr(scope_user, "asdict"): # dataclasses return scope_user.asdict() plugin = get_plugin_for_value(scope_user) if plugin and not is_async_callable(plugin.to_dict): return plugin.to_dict(scope_user) return None @ensure_integration_enabled(StarliteIntegration) def exception_handler(exc, scope, _): # type: (Exception, StarliteScope, State) -> None user_info = None # type: Optional[dict[str, Any]] if should_send_default_pii(): user_info = retrieve_user_from_scope(scope) if user_info and isinstance(user_info, dict): sentry_scope = sentry_sdk.get_isolation_scope() sentry_scope.set_user(user_info) event, hint = event_from_exception( exc, client_options=sentry_sdk.get_client().options, mechanism={"type": StarliteIntegration.identifier, "handled": False}, ) sentry_sdk.capture_event(event, 
hint=hint) sentry-python-2.18.0/sentry_sdk/integrations/stdlib.py000066400000000000000000000211771471214654000232070ustar00rootroot00000000000000import os import subprocess import sys import platform from http.client import HTTPConnection import sentry_sdk from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.integrations import Integration from sentry_sdk.scope import add_global_event_processor from sentry_sdk.tracing_utils import EnvironHeaders, should_propagate_trace from sentry_sdk.utils import ( SENSITIVE_DATA_SUBSTITUTE, capture_internal_exceptions, ensure_integration_enabled, is_sentry_url, logger, safe_repr, parse_url, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Callable from typing import Dict from typing import Optional from typing import List from sentry_sdk._types import Event, Hint _RUNTIME_CONTEXT = { "name": platform.python_implementation(), "version": "%s.%s.%s" % (sys.version_info[:3]), "build": sys.version, } # type: dict[str, object] class StdlibIntegration(Integration): identifier = "stdlib" @staticmethod def setup_once(): # type: () -> None _install_httplib() _install_subprocess() @add_global_event_processor def add_python_runtime_context(event, hint): # type: (Event, Hint) -> Optional[Event] if sentry_sdk.get_client().get_integration(StdlibIntegration) is not None: contexts = event.setdefault("contexts", {}) if isinstance(contexts, dict) and "runtime" not in contexts: contexts["runtime"] = _RUNTIME_CONTEXT return event def _install_httplib(): # type: () -> None real_putrequest = HTTPConnection.putrequest real_getresponse = HTTPConnection.getresponse def putrequest(self, method, url, *args, **kwargs): # type: (HTTPConnection, str, str, *Any, **Any) -> Any host = self.host port = self.port default_port = self.default_port client = sentry_sdk.get_client() if client.get_integration(StdlibIntegration) is None or is_sentry_url( client, host ): return real_putrequest(self, method, url, *args, **kwargs) real_url = url if real_url is None or not real_url.startswith(("http://", "https://")): real_url = "%s://%s%s%s" % ( default_port == 443 and "https" or "http", host, port != default_port and ":%s" % port or "", url, ) parsed_url = None with capture_internal_exceptions(): parsed_url = parse_url(real_url, sanitize=False) span = sentry_sdk.start_span( op=OP.HTTP_CLIENT, name="%s %s" % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE), origin="auto.http.stdlib.httplib", ) span.set_data(SPANDATA.HTTP_METHOD, method) if parsed_url is not None: span.set_data("url", parsed_url.url) span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) rv = real_putrequest(self, method, url, *args, **kwargs) if should_propagate_trace(client, real_url): for ( key, value, ) in sentry_sdk.get_current_scope().iter_trace_propagation_headers( span=span ): logger.debug( "[Tracing] Adding `{key}` header {value} to outgoing request to {real_url}.".format( key=key, value=value, real_url=real_url ) ) self.putheader(key, value) self._sentrysdk_span = span # type: ignore[attr-defined] return rv def getresponse(self, *args, **kwargs): # type: (HTTPConnection, *Any, **Any) -> Any span = getattr(self, "_sentrysdk_span", None) if span is None: return real_getresponse(self, *args, **kwargs) try: rv = real_getresponse(self, *args, **kwargs) span.set_http_status(int(rv.status)) span.set_data("reason", rv.reason) finally: span.finish() return rv HTTPConnection.putrequest = putrequest # type: 
ignore[method-assign] HTTPConnection.getresponse = getresponse # type: ignore[method-assign] def _init_argument(args, kwargs, name, position, setdefault_callback=None): # type: (List[Any], Dict[Any, Any], str, int, Optional[Callable[[Any], Any]]) -> Any """ given (*args, **kwargs) of a function call, retrieve (and optionally set a default for) an argument by either name or position. This is useful for wrapping functions with complex type signatures and extracting a few arguments without needing to redefine that function's entire type signature. """ if name in kwargs: rv = kwargs[name] if setdefault_callback is not None: rv = setdefault_callback(rv) if rv is not None: kwargs[name] = rv elif position < len(args): rv = args[position] if setdefault_callback is not None: rv = setdefault_callback(rv) if rv is not None: args[position] = rv else: rv = setdefault_callback and setdefault_callback(None) if rv is not None: kwargs[name] = rv return rv def _install_subprocess(): # type: () -> None old_popen_init = subprocess.Popen.__init__ @ensure_integration_enabled(StdlibIntegration, old_popen_init) def sentry_patched_popen_init(self, *a, **kw): # type: (subprocess.Popen[Any], *Any, **Any) -> None # Convert from tuple to list to be able to set values. a = list(a) args = _init_argument(a, kw, "args", 0) or [] cwd = _init_argument(a, kw, "cwd", 9) # if args is not a list or tuple (and e.g. some iterator instead), # let's not use it at all. There are too many things that can go wrong # when trying to collect an iterator into a list and setting that list # into `a` again. # # Also invocations where `args` is not a sequence are not actually # legal. They just happen to work under CPython. description = None if isinstance(args, (list, tuple)) and len(args) < 100: with capture_internal_exceptions(): description = " ".join(map(str, args)) if description is None: description = safe_repr(args) env = None with sentry_sdk.start_span( op=OP.SUBPROCESS, name=description, origin="auto.subprocess.stdlib.subprocess", ) as span: for k, v in sentry_sdk.get_current_scope().iter_trace_propagation_headers( span=span ): if env is None: env = _init_argument( a, kw, "env", 10, lambda x: dict(x if x is not None else os.environ), ) env["SUBPROCESS_" + k.upper().replace("-", "_")] = v if cwd: span.set_data("subprocess.cwd", cwd) rv = old_popen_init(self, *a, **kw) span.set_tag("subprocess.pid", self.pid) return rv subprocess.Popen.__init__ = sentry_patched_popen_init # type: ignore old_popen_wait = subprocess.Popen.wait @ensure_integration_enabled(StdlibIntegration, old_popen_wait) def sentry_patched_popen_wait(self, *a, **kw): # type: (subprocess.Popen[Any], *Any, **Any) -> Any with sentry_sdk.start_span( op=OP.SUBPROCESS_WAIT, origin="auto.subprocess.stdlib.subprocess", ) as span: span.set_tag("subprocess.pid", self.pid) return old_popen_wait(self, *a, **kw) subprocess.Popen.wait = sentry_patched_popen_wait # type: ignore old_popen_communicate = subprocess.Popen.communicate @ensure_integration_enabled(StdlibIntegration, old_popen_communicate) def sentry_patched_popen_communicate(self, *a, **kw): # type: (subprocess.Popen[Any], *Any, **Any) -> Any with sentry_sdk.start_span( op=OP.SUBPROCESS_COMMUNICATE, origin="auto.subprocess.stdlib.subprocess", ) as span: span.set_tag("subprocess.pid", self.pid) return old_popen_communicate(self, *a, **kw) subprocess.Popen.communicate = sentry_patched_popen_communicate # type: ignore def get_subprocess_traceparent_headers(): # type: () -> EnvironHeaders return EnvironHeaders(os.environ, 
prefix="SUBPROCESS_") sentry-python-2.18.0/sentry_sdk/integrations/strawberry.py000066400000000000000000000363251471214654000241330ustar00rootroot00000000000000import functools import hashlib from inspect import isawaitable import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, event_from_exception, logger, package_version, _get_installed_modules, ) try: from functools import cached_property except ImportError: # The strawberry integration requires Python 3.8+. functools.cached_property # was added in 3.8, so this check is technically not needed, but since this # is an auto-enabling integration, we might get to executing this import in # lower Python versions, so we need to deal with it. raise DidNotEnable("strawberry-graphql integration requires Python 3.8 or newer") try: import strawberry.schema.schema as strawberry_schema # type: ignore from strawberry import Schema from strawberry.extensions import SchemaExtension # type: ignore from strawberry.extensions.tracing.utils import should_skip_tracing as strawberry_should_skip_tracing # type: ignore from strawberry.http import async_base_view, sync_base_view # type: ignore except ImportError: raise DidNotEnable("strawberry-graphql is not installed") try: from strawberry.extensions.tracing import ( # type: ignore SentryTracingExtension as StrawberrySentryAsyncExtension, SentryTracingExtensionSync as StrawberrySentrySyncExtension, ) except ImportError: StrawberrySentryAsyncExtension = None StrawberrySentrySyncExtension = None from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any, Callable, Generator, List, Optional, Union from graphql import GraphQLError, GraphQLResolveInfo # type: ignore from strawberry.http import GraphQLHTTPResponse from strawberry.types import ExecutionContext, ExecutionResult, SubscriptionExecutionResult # type: ignore from sentry_sdk._types import Event, EventProcessor ignore_logger("strawberry.execution") class StrawberryIntegration(Integration): identifier = "strawberry" origin = f"auto.graphql.{identifier}" def __init__(self, async_execution=None): # type: (Optional[bool]) -> None if async_execution not in (None, False, True): raise ValueError( 'Invalid value for async_execution: "{}" (must be bool)'.format( async_execution ) ) self.async_execution = async_execution @staticmethod def setup_once(): # type: () -> None version = package_version("strawberry-graphql") if version is None: raise DidNotEnable( "Unparsable strawberry-graphql version: {}".format(version) ) if version < (0, 209, 5): raise DidNotEnable("strawberry-graphql 0.209.5 or newer required.") _patch_schema_init() _patch_execute() _patch_views() def _patch_schema_init(): # type: () -> None old_schema_init = Schema.__init__ @functools.wraps(old_schema_init) def _sentry_patched_schema_init(self, *args, **kwargs): # type: (Schema, Any, Any) -> None integration = sentry_sdk.get_client().get_integration(StrawberryIntegration) if integration is None: return old_schema_init(self, *args, **kwargs) extensions = kwargs.get("extensions") or [] if integration.async_execution is not None: should_use_async_extension = integration.async_execution else: # try to figure it out ourselves should_use_async_extension = 
_guess_if_using_async(extensions) logger.info( "Assuming strawberry is running %s. If not, initialize it as StrawberryIntegration(async_execution=%s).", "async" if should_use_async_extension else "sync", "False" if should_use_async_extension else "True", ) # remove the built in strawberry sentry extension, if present extensions = [ extension for extension in extensions if extension not in (StrawberrySentryAsyncExtension, StrawberrySentrySyncExtension) ] # add our extension extensions.append( SentryAsyncExtension if should_use_async_extension else SentrySyncExtension ) kwargs["extensions"] = extensions return old_schema_init(self, *args, **kwargs) Schema.__init__ = _sentry_patched_schema_init class SentryAsyncExtension(SchemaExtension): # type: ignore def __init__( self, *, execution_context=None, ): # type: (Any, Optional[ExecutionContext]) -> None if execution_context: self.execution_context = execution_context @cached_property def _resource_name(self): # type: () -> str query_hash = self.hash_query(self.execution_context.query) if self.execution_context.operation_name: return "{}:{}".format(self.execution_context.operation_name, query_hash) return query_hash def hash_query(self, query): # type: (str) -> str return hashlib.md5(query.encode("utf-8")).hexdigest() def on_operation(self): # type: () -> Generator[None, None, None] self._operation_name = self.execution_context.operation_name operation_type = "query" op = OP.GRAPHQL_QUERY if self.execution_context.query is None: self.execution_context.query = "" if self.execution_context.query.strip().startswith("mutation"): operation_type = "mutation" op = OP.GRAPHQL_MUTATION elif self.execution_context.query.strip().startswith("subscription"): operation_type = "subscription" op = OP.GRAPHQL_SUBSCRIPTION description = operation_type if self._operation_name: description += " {}".format(self._operation_name) sentry_sdk.add_breadcrumb( category="graphql.operation", data={ "operation_name": self._operation_name, "operation_type": operation_type, }, ) span = sentry_sdk.get_current_span() if span: self.graphql_span = span.start_child( op=op, name=description, origin=StrawberryIntegration.origin, ) else: self.graphql_span = sentry_sdk.start_span( op=op, name=description, origin=StrawberryIntegration.origin, ) self.graphql_span.set_data("graphql.operation.type", operation_type) self.graphql_span.set_data("graphql.operation.name", self._operation_name) self.graphql_span.set_data("graphql.document", self.execution_context.query) self.graphql_span.set_data("graphql.resource_name", self._resource_name) yield transaction = self.graphql_span.containing_transaction if transaction and self.execution_context.operation_name: transaction.name = self.execution_context.operation_name transaction.source = TRANSACTION_SOURCE_COMPONENT transaction.op = op self.graphql_span.finish() def on_validate(self): # type: () -> Generator[None, None, None] self.validation_span = self.graphql_span.start_child( op=OP.GRAPHQL_VALIDATE, name="validation", origin=StrawberryIntegration.origin, ) yield self.validation_span.finish() def on_parse(self): # type: () -> Generator[None, None, None] self.parsing_span = self.graphql_span.start_child( op=OP.GRAPHQL_PARSE, name="parsing", origin=StrawberryIntegration.origin, ) yield self.parsing_span.finish() def should_skip_tracing(self, _next, info): # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], GraphQLResolveInfo) -> bool return strawberry_should_skip_tracing(_next, info) async def _resolve(self, _next, root, info, *args, 
**kwargs): # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], Any, GraphQLResolveInfo, str, Any) -> Any result = _next(root, info, *args, **kwargs) if isawaitable(result): result = await result return result async def resolve(self, _next, root, info, *args, **kwargs): # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], Any, GraphQLResolveInfo, str, Any) -> Any if self.should_skip_tracing(_next, info): return await self._resolve(_next, root, info, *args, **kwargs) field_path = "{}.{}".format(info.parent_type, info.field_name) with self.graphql_span.start_child( op=OP.GRAPHQL_RESOLVE, name="resolving {}".format(field_path), origin=StrawberryIntegration.origin, ) as span: span.set_data("graphql.field_name", info.field_name) span.set_data("graphql.parent_type", info.parent_type.name) span.set_data("graphql.field_path", field_path) span.set_data("graphql.path", ".".join(map(str, info.path.as_list()))) return await self._resolve(_next, root, info, *args, **kwargs) class SentrySyncExtension(SentryAsyncExtension): def resolve(self, _next, root, info, *args, **kwargs): # type: (Callable[[Any, Any, Any, Any], Any], Any, GraphQLResolveInfo, str, Any) -> Any if self.should_skip_tracing(_next, info): return _next(root, info, *args, **kwargs) field_path = "{}.{}".format(info.parent_type, info.field_name) with self.graphql_span.start_child( op=OP.GRAPHQL_RESOLVE, name="resolving {}".format(field_path), origin=StrawberryIntegration.origin, ) as span: span.set_data("graphql.field_name", info.field_name) span.set_data("graphql.parent_type", info.parent_type.name) span.set_data("graphql.field_path", field_path) span.set_data("graphql.path", ".".join(map(str, info.path.as_list()))) return _next(root, info, *args, **kwargs) def _patch_execute(): # type: () -> None old_execute_async = strawberry_schema.execute old_execute_sync = strawberry_schema.execute_sync async def _sentry_patched_execute_async(*args, **kwargs): # type: (Any, Any) -> Union[ExecutionResult, SubscriptionExecutionResult] result = await old_execute_async(*args, **kwargs) if sentry_sdk.get_client().get_integration(StrawberryIntegration) is None: return result if "execution_context" in kwargs: scope = sentry_sdk.get_isolation_scope() event_processor = _make_request_event_processor(kwargs["execution_context"]) scope.add_event_processor(event_processor) return result @ensure_integration_enabled(StrawberryIntegration, old_execute_sync) def _sentry_patched_execute_sync(*args, **kwargs): # type: (Any, Any) -> ExecutionResult result = old_execute_sync(*args, **kwargs) if "execution_context" in kwargs: scope = sentry_sdk.get_isolation_scope() event_processor = _make_request_event_processor(kwargs["execution_context"]) scope.add_event_processor(event_processor) return result strawberry_schema.execute = _sentry_patched_execute_async strawberry_schema.execute_sync = _sentry_patched_execute_sync def _patch_views(): # type: () -> None old_async_view_handle_errors = async_base_view.AsyncBaseHTTPView._handle_errors old_sync_view_handle_errors = sync_base_view.SyncBaseHTTPView._handle_errors def _sentry_patched_async_view_handle_errors(self, errors, response_data): # type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None old_async_view_handle_errors(self, errors, response_data) _sentry_patched_handle_errors(self, errors, response_data) def _sentry_patched_sync_view_handle_errors(self, errors, response_data): # type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None old_sync_view_handle_errors(self, errors, response_data) 
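# Let Strawberry's own error handling run first, then report the collected GraphQL errors to Sentry.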
_sentry_patched_handle_errors(self, errors, response_data) @ensure_integration_enabled(StrawberryIntegration) def _sentry_patched_handle_errors(self, errors, response_data): # type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None if not errors: return scope = sentry_sdk.get_isolation_scope() event_processor = _make_response_event_processor(response_data) scope.add_event_processor(event_processor) with capture_internal_exceptions(): for error in errors: event, hint = event_from_exception( error, client_options=sentry_sdk.get_client().options, mechanism={ "type": StrawberryIntegration.identifier, "handled": False, }, ) sentry_sdk.capture_event(event, hint=hint) async_base_view.AsyncBaseHTTPView._handle_errors = ( _sentry_patched_async_view_handle_errors ) sync_base_view.SyncBaseHTTPView._handle_errors = ( _sentry_patched_sync_view_handle_errors ) def _make_request_event_processor(execution_context): # type: (ExecutionContext) -> EventProcessor def inner(event, hint): # type: (Event, dict[str, Any]) -> Event with capture_internal_exceptions(): if should_send_default_pii(): request_data = event.setdefault("request", {}) request_data["api_target"] = "graphql" if not request_data.get("data"): data = {"query": execution_context.query} if execution_context.variables: data["variables"] = execution_context.variables if execution_context.operation_name: data["operationName"] = execution_context.operation_name request_data["data"] = data else: try: del event["request"]["data"] except (KeyError, TypeError): pass return event return inner def _make_response_event_processor(response_data): # type: (GraphQLHTTPResponse) -> EventProcessor def inner(event, hint): # type: (Event, dict[str, Any]) -> Event with capture_internal_exceptions(): if should_send_default_pii(): contexts = event.setdefault("contexts", {}) contexts["response"] = {"data": response_data} return event return inner def _guess_if_using_async(extensions): # type: (List[SchemaExtension]) -> bool if StrawberrySentryAsyncExtension in extensions: return True elif StrawberrySentrySyncExtension in extensions: return False return bool( {"starlette", "starlite", "litestar", "fastapi"} & set(_get_installed_modules()) ) sentry-python-2.18.0/sentry_sdk/integrations/sys_exit.py000066400000000000000000000046751471214654000236010ustar00rootroot00000000000000import functools import sys import sentry_sdk from sentry_sdk.utils import capture_internal_exceptions, event_from_exception from sentry_sdk.integrations import Integration from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: from collections.abc import Callable from typing import NoReturn, Union class SysExitIntegration(Integration): """Captures sys.exit calls and sends them as events to Sentry. By default, SystemExit exceptions are not captured by the SDK. Enabling this integration will capture SystemExit exceptions generated by sys.exit calls and send them to Sentry. This integration, in its default configuration, only captures the sys.exit call if the exit code is a non-zero and non-None value (unsuccessful exits). Pass `capture_successful_exits=True` to capture successful exits as well. Note that the integration does not capture SystemExit exceptions raised outside a call to sys.exit. 
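A minimal usage sketch (the DSN value is a placeholder):

    import sys

    import sentry_sdk
    from sentry_sdk.integrations.sys_exit import SysExitIntegration

    sentry_sdk.init(
        dsn="https://<key>@<org>.ingest.sentry.io/<project>",
        integrations=[SysExitIntegration(capture_successful_exits=True)],
    )

    sys.exit(0)  # captured and re-raised, because capture_successful_exits=True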
""" identifier = "sys_exit" def __init__(self, *, capture_successful_exits=False): # type: (bool) -> None self._capture_successful_exits = capture_successful_exits @staticmethod def setup_once(): # type: () -> None SysExitIntegration._patch_sys_exit() @staticmethod def _patch_sys_exit(): # type: () -> None old_exit = sys.exit # type: Callable[[Union[str, int, None]], NoReturn] @functools.wraps(old_exit) def sentry_patched_exit(__status=0): # type: (Union[str, int, None]) -> NoReturn # @ensure_integration_enabled ensures that this is non-None integration = sentry_sdk.get_client().get_integration(SysExitIntegration) if integration is None: old_exit(__status) try: old_exit(__status) except SystemExit as e: with capture_internal_exceptions(): if integration._capture_successful_exits or __status not in ( 0, None, ): _capture_exception(e) raise e sys.exit = sentry_patched_exit def _capture_exception(exc): # type: (SystemExit) -> None event, hint = event_from_exception( exc, client_options=sentry_sdk.get_client().options, mechanism={"type": SysExitIntegration.identifier, "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) sentry-python-2.18.0/sentry_sdk/integrations/threading.py000066400000000000000000000076531471214654000236760ustar00rootroot00000000000000import sys from functools import wraps from threading import Thread, current_thread import sentry_sdk from sentry_sdk.integrations import Integration from sentry_sdk.scope import use_isolation_scope, use_scope from sentry_sdk.utils import ( event_from_exception, capture_internal_exceptions, logger, reraise, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import TypeVar from typing import Callable from typing import Optional from sentry_sdk._types import ExcInfo F = TypeVar("F", bound=Callable[..., Any]) class ThreadingIntegration(Integration): identifier = "threading" def __init__(self, propagate_hub=None, propagate_scope=True): # type: (Optional[bool], bool) -> None if propagate_hub is not None: logger.warning( "Deprecated: propagate_hub is deprecated. This will be removed in the future." ) # Note: propagate_hub did not have any effect on propagation of scope data # scope data was always propagated no matter what the value of propagate_hub was # This is why the default for propagate_scope is True self.propagate_scope = propagate_scope if propagate_hub is not None: self.propagate_scope = propagate_hub @staticmethod def setup_once(): # type: () -> None old_start = Thread.start @wraps(old_start) def sentry_start(self, *a, **kw): # type: (Thread, *Any, **Any) -> Any integration = sentry_sdk.get_client().get_integration(ThreadingIntegration) if integration is None: return old_start(self, *a, **kw) if integration.propagate_scope: isolation_scope = sentry_sdk.get_isolation_scope() current_scope = sentry_sdk.get_current_scope() else: isolation_scope = None current_scope = None # Patching instance methods in `start()` creates a reference cycle if # done in a naive way. See # https://github.com/getsentry/sentry-python/pull/434 # # In threading module, using current_thread API will access current thread instance # without holding it to avoid a reference cycle in an easier way. 
with capture_internal_exceptions(): new_run = _wrap_run( isolation_scope, current_scope, getattr(self.run, "__func__", self.run), ) self.run = new_run # type: ignore return old_start(self, *a, **kw) Thread.start = sentry_start # type: ignore def _wrap_run(isolation_scope_to_use, current_scope_to_use, old_run_func): # type: (Optional[sentry_sdk.Scope], Optional[sentry_sdk.Scope], F) -> F @wraps(old_run_func) def run(*a, **kw): # type: (*Any, **Any) -> Any def _run_old_run_func(): # type: () -> Any try: self = current_thread() return old_run_func(self, *a, **kw) except Exception: reraise(*_capture_exception()) if isolation_scope_to_use is not None and current_scope_to_use is not None: with use_isolation_scope(isolation_scope_to_use): with use_scope(current_scope_to_use): return _run_old_run_func() else: return _run_old_run_func() return run # type: ignore def _capture_exception(): # type: () -> ExcInfo exc_info = sys.exc_info() client = sentry_sdk.get_client() if client.get_integration(ThreadingIntegration) is not None: event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "threading", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) return exc_info sentry-python-2.18.0/sentry_sdk/integrations/tornado.py000066400000000000000000000161331471214654000233700ustar00rootroot00000000000000import weakref import contextlib from inspect import iscoroutinefunction import sentry_sdk from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import ( TRANSACTION_SOURCE_COMPONENT, TRANSACTION_SOURCE_ROUTE, ) from sentry_sdk.utils import ( HAS_REAL_CONTEXTVARS, CONTEXTVARS_ERROR_MESSAGE, ensure_integration_enabled, event_from_exception, capture_internal_exceptions, transaction_from_function, ) from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations._wsgi_common import ( RequestExtractor, _filter_headers, _is_json_content_type, ) from sentry_sdk.integrations.logging import ignore_logger try: from tornado import version_info as TORNADO_VERSION from tornado.web import RequestHandler, HTTPError from tornado.gen import coroutine except ImportError: raise DidNotEnable("Tornado not installed") from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Optional from typing import Dict from typing import Callable from typing import Generator from sentry_sdk._types import Event, EventProcessor class TornadoIntegration(Integration): identifier = "tornado" origin = f"auto.http.{identifier}" @staticmethod def setup_once(): # type: () -> None if TORNADO_VERSION < (6, 0): raise DidNotEnable("Tornado 6.0+ required") if not HAS_REAL_CONTEXTVARS: # Tornado is async. We better have contextvars or we're going to leak # state between requests. 
            raise DidNotEnable(
                "The tornado integration for Sentry requires Python 3.7+ or the aiocontextvars package"
                + CONTEXTVARS_ERROR_MESSAGE
            )

        ignore_logger("tornado.access")

        old_execute = RequestHandler._execute

        awaitable = iscoroutinefunction(old_execute)

        if awaitable:
            # Starting with Tornado 6, the RequestHandler._execute method is a
            # standard Python coroutine (async/await).
            # In that case our method should be a coroutine function too.
            async def sentry_execute_request_handler(self, *args, **kwargs):
                # type: (RequestHandler, *Any, **Any) -> Any
                with _handle_request_impl(self):
                    return await old_execute(self, *args, **kwargs)

        else:

            @coroutine  # type: ignore
            def sentry_execute_request_handler(self, *args, **kwargs):
                # type: (RequestHandler, *Any, **Any) -> Any
                with _handle_request_impl(self):
                    result = yield from old_execute(self, *args, **kwargs)
                    return result

        RequestHandler._execute = sentry_execute_request_handler

        old_log_exception = RequestHandler.log_exception

        def sentry_log_exception(self, ty, value, tb, *args, **kwargs):
            # type: (Any, type, BaseException, Any, *Any, **Any) -> Optional[Any]
            _capture_exception(ty, value, tb)
            return old_log_exception(self, ty, value, tb, *args, **kwargs)

        RequestHandler.log_exception = sentry_log_exception


@contextlib.contextmanager
def _handle_request_impl(self):
    # type: (RequestHandler) -> Generator[None, None, None]
    integration = sentry_sdk.get_client().get_integration(TornadoIntegration)

    if integration is None:
        yield
        # Without this return, the generator would resume below and yield a
        # second time, which @contextlib.contextmanager rejects at runtime.
        return

    weak_handler = weakref.ref(self)

    with sentry_sdk.isolation_scope() as scope:
        headers = self.request.headers

        scope.clear_breadcrumbs()
        processor = _make_event_processor(weak_handler)
        scope.add_event_processor(processor)

        transaction = continue_trace(
            headers,
            op=OP.HTTP_SERVER,
            # Like with all other integrations, this is our
            # fallback transaction in case there is no route.
            # sentry_urldispatcher_resolve is responsible for
            # setting a transaction name later.
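            # For example (illustrative): a handler method MyHandler.get
            # typically ends up as a transaction name like
            # "myapp.handlers.MyHandler.get" via transaction_from_function
            # in the event processor below.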
name="generic Tornado request", source=TRANSACTION_SOURCE_ROUTE, origin=TornadoIntegration.origin, ) with sentry_sdk.start_transaction( transaction, custom_sampling_context={"tornado_request": self.request} ): yield @ensure_integration_enabled(TornadoIntegration) def _capture_exception(ty, value, tb): # type: (type, BaseException, Any) -> None if isinstance(value, HTTPError): return event, hint = event_from_exception( (ty, value, tb), client_options=sentry_sdk.get_client().options, mechanism={"type": "tornado", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) def _make_event_processor(weak_handler): # type: (Callable[[], RequestHandler]) -> EventProcessor def tornado_processor(event, hint): # type: (Event, dict[str, Any]) -> Event handler = weak_handler() if handler is None: return event request = handler.request with capture_internal_exceptions(): method = getattr(handler, handler.request.method.lower()) event["transaction"] = transaction_from_function(method) or "" event["transaction_info"] = {"source": TRANSACTION_SOURCE_COMPONENT} with capture_internal_exceptions(): extractor = TornadoRequestExtractor(request) extractor.extract_into_event(event) request_info = event["request"] request_info["url"] = "%s://%s%s" % ( request.protocol, request.host, request.path, ) request_info["query_string"] = request.query request_info["method"] = request.method request_info["env"] = {"REMOTE_ADDR": request.remote_ip} request_info["headers"] = _filter_headers(dict(request.headers)) with capture_internal_exceptions(): if handler.current_user and should_send_default_pii(): event.setdefault("user", {}).setdefault("is_authenticated", True) return event return tornado_processor class TornadoRequestExtractor(RequestExtractor): def content_length(self): # type: () -> int if self.request.body is None: return 0 return len(self.request.body) def cookies(self): # type: () -> Dict[str, str] return {k: v.value for k, v in self.request.cookies.items()} def raw_data(self): # type: () -> bytes return self.request.body def form(self): # type: () -> Dict[str, Any] return { k: [v.decode("latin1", "replace") for v in vs] for k, vs in self.request.body_arguments.items() } def is_json(self): # type: () -> bool return _is_json_content_type(self.request.headers.get("content-type")) def files(self): # type: () -> Dict[str, Any] return {k: v[0] for k, v in self.request.files.items() if v} def size_of_file(self, file): # type: (Any) -> int return len(file.body or ()) sentry-python-2.18.0/sentry_sdk/integrations/trytond.py000066400000000000000000000031631471214654000234240ustar00rootroot00000000000000import sentry_sdk from sentry_sdk.integrations import Integration from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware from sentry_sdk.utils import ensure_integration_enabled, event_from_exception from trytond.exceptions import TrytonException # type: ignore from trytond.wsgi import app # type: ignore # TODO: trytond-worker, trytond-cron and trytond-admin intergations class TrytondWSGIIntegration(Integration): identifier = "trytond_wsgi" origin = f"auto.http.{identifier}" def __init__(self): # type: () -> None pass @staticmethod def setup_once(): # type: () -> None app.wsgi_app = SentryWsgiMiddleware( app.wsgi_app, span_origin=TrytondWSGIIntegration.origin, ) @ensure_integration_enabled(TrytondWSGIIntegration) def error_handler(e): # type: (Exception) -> None if isinstance(e, TrytonException): return else: client = sentry_sdk.get_client() event, hint = event_from_exception( e, client_options=client.options, 
mechanism={"type": "trytond", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) # Expected error handlers signature was changed # when the error_handler decorator was introduced # in Tryton-5.4 if hasattr(app, "error_handler"): @app.error_handler def _(app, request, e): # type: ignore error_handler(e) else: app.error_handlers.append(error_handler) sentry-python-2.18.0/sentry_sdk/integrations/wsgi.py000066400000000000000000000250031471214654000226670ustar00rootroot00000000000000import sys from functools import partial import sentry_sdk from sentry_sdk._werkzeug import get_host, _get_headers from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP from sentry_sdk.scope import should_send_default_pii from sentry_sdk.integrations._wsgi_common import ( DEFAULT_HTTP_METHODS_TO_CAPTURE, _filter_headers, nullcontext, ) from sentry_sdk.sessions import track_session from sentry_sdk.scope import use_isolation_scope from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_ROUTE from sentry_sdk.utils import ( ContextVar, capture_internal_exceptions, event_from_exception, reraise, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Callable from typing import Dict from typing import Iterator from typing import Any from typing import Tuple from typing import Optional from typing import TypeVar from typing import Protocol from sentry_sdk.utils import ExcInfo from sentry_sdk._types import Event, EventProcessor WsgiResponseIter = TypeVar("WsgiResponseIter") WsgiResponseHeaders = TypeVar("WsgiResponseHeaders") WsgiExcInfo = TypeVar("WsgiExcInfo") class StartResponse(Protocol): def __call__(self, status, response_headers, exc_info=None): # type: ignore # type: (str, WsgiResponseHeaders, Optional[WsgiExcInfo]) -> WsgiResponseIter pass _wsgi_middleware_applied = ContextVar("sentry_wsgi_middleware_applied") def wsgi_decoding_dance(s, charset="utf-8", errors="replace"): # type: (str, str, str) -> str return s.encode("latin1").decode(charset, errors) def get_request_url(environ, use_x_forwarded_for=False): # type: (Dict[str, str], bool) -> str """Return the absolute URL without query string for the given WSGI environment.""" script_name = environ.get("SCRIPT_NAME", "").rstrip("/") path_info = environ.get("PATH_INFO", "").lstrip("/") path = f"{script_name}/{path_info}" return "%s://%s/%s" % ( environ.get("wsgi.url_scheme"), get_host(environ, use_x_forwarded_for), wsgi_decoding_dance(path).lstrip("/"), ) class SentryWsgiMiddleware: __slots__ = ( "app", "use_x_forwarded_for", "span_origin", "http_methods_to_capture", ) def __init__( self, app, # type: Callable[[Dict[str, str], Callable[..., Any]], Any] use_x_forwarded_for=False, # type: bool span_origin="manual", # type: str http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE, # type: Tuple[str, ...] ): # type: (...) 
-> None
        self.app = app
        self.use_x_forwarded_for = use_x_forwarded_for
        self.span_origin = span_origin
        self.http_methods_to_capture = http_methods_to_capture

    def __call__(self, environ, start_response):
        # type: (Dict[str, str], Callable[..., Any]) -> _ScopedResponse
        if _wsgi_middleware_applied.get(False):
            return self.app(environ, start_response)

        _wsgi_middleware_applied.set(True)
        try:
            with sentry_sdk.isolation_scope() as scope:
                with track_session(scope, session_mode="request"):
                    with capture_internal_exceptions():
                        scope.clear_breadcrumbs()
                        scope._name = "wsgi"
                        scope.add_event_processor(
                            _make_wsgi_event_processor(
                                environ, self.use_x_forwarded_for
                            )
                        )

                    method = environ.get("REQUEST_METHOD", "").upper()
                    transaction = None
                    if method in self.http_methods_to_capture:
                        transaction = continue_trace(
                            environ,
                            op=OP.HTTP_SERVER,
                            name="generic WSGI request",
                            source=TRANSACTION_SOURCE_ROUTE,
                            origin=self.span_origin,
                        )

                    with (
                        sentry_sdk.start_transaction(
                            transaction,
                            custom_sampling_context={"wsgi_environ": environ},
                        )
                        if transaction is not None
                        else nullcontext()
                    ):
                        try:
                            response = self.app(
                                environ,
                                partial(
                                    _sentry_start_response, start_response, transaction
                                ),
                            )
                        except BaseException:
                            reraise(*_capture_exception())
        finally:
            _wsgi_middleware_applied.set(False)

        return _ScopedResponse(scope, response)


def _sentry_start_response(  # type: ignore
    old_start_response,  # type: StartResponse
    transaction,  # type: Optional[Transaction]
    status,  # type: str
    response_headers,  # type: WsgiResponseHeaders
    exc_info=None,  # type: Optional[WsgiExcInfo]
):
    # type: (...) -> WsgiResponseIter
    with capture_internal_exceptions():
        status_int = int(status.split(" ", 1)[0])
        if transaction is not None:
            transaction.set_http_status(status_int)

    if exc_info is None:
        # The Django Rest Framework WSGI test client, and likely other
        # (incorrect) implementations, cannot deal with the exc_info argument
        # if one is present. Avoid providing a third argument if not necessary.
        return old_start_response(status, response_headers)
    else:
        return old_start_response(status, response_headers, exc_info)


def _get_environ(environ):
    # type: (Dict[str, str]) -> Iterator[Tuple[str, str]]
    """
    Returns our explicitly included environment variables we want to capture
    (server name, port and remote addr if pii is enabled).
    """
    keys = ["SERVER_NAME", "SERVER_PORT"]
    if should_send_default_pii():
        # Make debugging of proxy setups easier. The proxy headers themselves
        # are already captured with the request headers.
        keys += ["REMOTE_ADDR"]

    for key in keys:
        if key in environ:
            yield key, environ[key]


def get_client_ip(environ):
    # type: (Dict[str, str]) -> Optional[Any]
    """
    Infer the user IP address from various headers. This cannot be used in
    security sensitive situations since the value may be forged from a client,
    but it's good enough for the event payload.
    """
    try:
        return environ["HTTP_X_FORWARDED_FOR"].split(",")[0].strip()
    except (KeyError, IndexError):
        pass

    try:
        return environ["HTTP_X_REAL_IP"]
    except KeyError:
        pass

    return environ.get("REMOTE_ADDR")


def _capture_exception():
    # type: () -> ExcInfo
    """
    Captures the current exception and sends it to Sentry.

    Returns the ExcInfo tuple so it can be reraised afterwards.
""" exc_info = sys.exc_info() e = exc_info[1] # SystemExit(0) is the only uncaught exception that is expected behavior should_skip_capture = isinstance(e, SystemExit) and e.code in (0, None) if not should_skip_capture: event, hint = event_from_exception( exc_info, client_options=sentry_sdk.get_client().options, mechanism={"type": "wsgi", "handled": False}, ) sentry_sdk.capture_event(event, hint=hint) return exc_info class _ScopedResponse: """ Users a separate scope for each response chunk. This will make WSGI apps more tolerant against: - WSGI servers streaming responses from a different thread/from different threads than the one that called start_response - close() not being called - WSGI servers streaming responses interleaved from the same thread """ __slots__ = ("_response", "_scope") def __init__(self, scope, response): # type: (sentry_sdk.scope.Scope, Iterator[bytes]) -> None self._scope = scope self._response = response def __iter__(self): # type: () -> Iterator[bytes] iterator = iter(self._response) while True: with use_isolation_scope(self._scope): try: chunk = next(iterator) except StopIteration: break except BaseException: reraise(*_capture_exception()) yield chunk def close(self): # type: () -> None with use_isolation_scope(self._scope): try: self._response.close() # type: ignore except AttributeError: pass except BaseException: reraise(*_capture_exception()) def _make_wsgi_event_processor(environ, use_x_forwarded_for): # type: (Dict[str, str], bool) -> EventProcessor # It's a bit unfortunate that we have to extract and parse the request data # from the environ so eagerly, but there are a few good reasons for this. # # We might be in a situation where the scope never gets torn down # properly. In that case we will have an unnecessary strong reference to # all objects in the environ (some of which may take a lot of memory) when # we're really just interested in a few of them. 
# # Keeping the environment around for longer than the request lifecycle is # also not necessarily something uWSGI can deal with: # https://github.com/unbit/uwsgi/issues/1950 client_ip = get_client_ip(environ) request_url = get_request_url(environ, use_x_forwarded_for) query_string = environ.get("QUERY_STRING") method = environ.get("REQUEST_METHOD") env = dict(_get_environ(environ)) headers = _filter_headers(dict(_get_headers(environ))) def event_processor(event, hint): # type: (Event, Dict[str, Any]) -> Event with capture_internal_exceptions(): # if the code below fails halfway through we at least have some data request_info = event.setdefault("request", {}) if should_send_default_pii(): user_info = event.setdefault("user", {}) if client_ip: user_info.setdefault("ip_address", client_ip) request_info["url"] = request_url request_info["query_string"] = query_string request_info["method"] = method request_info["env"] = env request_info["headers"] = headers return event return event_processor sentry-python-2.18.0/sentry_sdk/metrics.py000066400000000000000000000725111471214654000206640ustar00rootroot00000000000000import io import os import random import re import sys import threading import time import warnings import zlib from abc import ABC, abstractmethod from contextlib import contextmanager from datetime import datetime, timezone from functools import wraps, partial import sentry_sdk from sentry_sdk.utils import ( ContextVar, now, nanosecond_time, to_timestamp, serialize_frame, json_dumps, ) from sentry_sdk.envelope import Envelope, Item from sentry_sdk.tracing import ( TRANSACTION_SOURCE_ROUTE, TRANSACTION_SOURCE_VIEW, TRANSACTION_SOURCE_COMPONENT, TRANSACTION_SOURCE_TASK, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Callable from typing import Dict from typing import Generator from typing import Iterable from typing import List from typing import Optional from typing import Set from typing import Tuple from typing import Union from sentry_sdk._types import BucketKey from sentry_sdk._types import DurationUnit from sentry_sdk._types import FlushedMetricValue from sentry_sdk._types import MeasurementUnit from sentry_sdk._types import MetricMetaKey from sentry_sdk._types import MetricTagValue from sentry_sdk._types import MetricTags from sentry_sdk._types import MetricTagsInternal from sentry_sdk._types import MetricType from sentry_sdk._types import MetricValue warnings.warn( "The sentry_sdk.metrics module is deprecated and will be removed in the next major release. " "Sentry will reject all metrics sent after October 7, 2024. 
" "Learn more: https://sentry.zendesk.com/hc/en-us/articles/26369339769883-Upcoming-API-Changes-to-Metrics", DeprecationWarning, stacklevel=2, ) _in_metrics = ContextVar("in_metrics", default=False) _set = set # set is shadowed below GOOD_TRANSACTION_SOURCES = frozenset( [ TRANSACTION_SOURCE_ROUTE, TRANSACTION_SOURCE_VIEW, TRANSACTION_SOURCE_COMPONENT, TRANSACTION_SOURCE_TASK, ] ) _sanitize_unit = partial(re.compile(r"[^a-zA-Z0-9_]+").sub, "") _sanitize_metric_key = partial(re.compile(r"[^a-zA-Z0-9_\-.]+").sub, "_") _sanitize_tag_key = partial(re.compile(r"[^a-zA-Z0-9_\-.\/]+").sub, "") def _sanitize_tag_value(value): # type: (str) -> str table = str.maketrans( { "\n": "\\n", "\r": "\\r", "\t": "\\t", "\\": "\\\\", "|": "\\u{7c}", ",": "\\u{2c}", } ) return value.translate(table) def get_code_location(stacklevel): # type: (int) -> Optional[Dict[str, Any]] try: frm = sys._getframe(stacklevel) except Exception: return None return serialize_frame( frm, include_local_variables=False, include_source_context=True ) @contextmanager def recursion_protection(): # type: () -> Generator[bool, None, None] """Enters recursion protection and returns the old flag.""" old_in_metrics = _in_metrics.get() _in_metrics.set(True) try: yield old_in_metrics finally: _in_metrics.set(old_in_metrics) def metrics_noop(func): # type: (Any) -> Any """Convenient decorator that uses `recursion_protection` to make a function a noop. """ @wraps(func) def new_func(*args, **kwargs): # type: (*Any, **Any) -> Any with recursion_protection() as in_metrics: if not in_metrics: return func(*args, **kwargs) return new_func class Metric(ABC): __slots__ = () @abstractmethod def __init__(self, first): # type: (MetricValue) -> None pass @property @abstractmethod def weight(self): # type: () -> int pass @abstractmethod def add(self, value): # type: (MetricValue) -> None pass @abstractmethod def serialize_value(self): # type: () -> Iterable[FlushedMetricValue] pass class CounterMetric(Metric): __slots__ = ("value",) def __init__( self, first # type: MetricValue ): # type: (...) -> None self.value = float(first) @property def weight(self): # type: (...) -> int return 1 def add( self, value # type: MetricValue ): # type: (...) -> None self.value += float(value) def serialize_value(self): # type: (...) -> Iterable[FlushedMetricValue] return (self.value,) class GaugeMetric(Metric): __slots__ = ( "last", "min", "max", "sum", "count", ) def __init__( self, first # type: MetricValue ): # type: (...) -> None first = float(first) self.last = first self.min = first self.max = first self.sum = first self.count = 1 @property def weight(self): # type: (...) -> int # Number of elements. return 5 def add( self, value # type: MetricValue ): # type: (...) -> None value = float(value) self.last = value self.min = min(self.min, value) self.max = max(self.max, value) self.sum += value self.count += 1 def serialize_value(self): # type: (...) -> Iterable[FlushedMetricValue] return ( self.last, self.min, self.max, self.sum, self.count, ) class DistributionMetric(Metric): __slots__ = ("value",) def __init__( self, first # type: MetricValue ): # type(...) -> None self.value = [float(first)] @property def weight(self): # type: (...) -> int return len(self.value) def add( self, value # type: MetricValue ): # type: (...) -> None self.value.append(float(value)) def serialize_value(self): # type: (...) -> Iterable[FlushedMetricValue] return self.value class SetMetric(Metric): __slots__ = ("value",) def __init__( self, first # type: MetricValue ): # type: (...) 
-> None self.value = {first} @property def weight(self): # type: (...) -> int return len(self.value) def add( self, value # type: MetricValue ): # type: (...) -> None self.value.add(value) def serialize_value(self): # type: (...) -> Iterable[FlushedMetricValue] def _hash(x): # type: (MetricValue) -> int if isinstance(x, str): return zlib.crc32(x.encode("utf-8")) & 0xFFFFFFFF return int(x) return (_hash(value) for value in self.value) def _encode_metrics(flushable_buckets): # type: (Iterable[Tuple[int, Dict[BucketKey, Metric]]]) -> bytes out = io.BytesIO() _write = out.write # Note on sanitization: we intentionally sanitize in emission (serialization) # and not during aggregation for performance reasons. This means that the # envelope can in fact have duplicate buckets stored. This is acceptable for # relay side emission and should not happen commonly. for timestamp, buckets in flushable_buckets: for bucket_key, metric in buckets.items(): metric_type, metric_name, metric_unit, metric_tags = bucket_key metric_name = _sanitize_metric_key(metric_name) metric_unit = _sanitize_unit(metric_unit) _write(metric_name.encode("utf-8")) _write(b"@") _write(metric_unit.encode("utf-8")) for serialized_value in metric.serialize_value(): _write(b":") _write(str(serialized_value).encode("utf-8")) _write(b"|") _write(metric_type.encode("ascii")) if metric_tags: _write(b"|#") first = True for tag_key, tag_value in metric_tags: tag_key = _sanitize_tag_key(tag_key) if not tag_key: continue if first: first = False else: _write(b",") _write(tag_key.encode("utf-8")) _write(b":") _write(_sanitize_tag_value(tag_value).encode("utf-8")) _write(b"|T") _write(str(timestamp).encode("ascii")) _write(b"\n") return out.getvalue() def _encode_locations(timestamp, code_locations): # type: (int, Iterable[Tuple[MetricMetaKey, Dict[str, Any]]]) -> bytes mapping = {} # type: Dict[str, List[Any]] for key, loc in code_locations: metric_type, name, unit = key mri = "{}:{}@{}".format( metric_type, _sanitize_metric_key(name), _sanitize_unit(unit) ) loc["type"] = "location" mapping.setdefault(mri, []).append(loc) return json_dumps({"timestamp": timestamp, "mapping": mapping}) METRIC_TYPES = { "c": CounterMetric, "g": GaugeMetric, "d": DistributionMetric, "s": SetMetric, } # type: dict[MetricType, type[Metric]] # some of these are dumb TIMING_FUNCTIONS = { "nanosecond": nanosecond_time, "microsecond": lambda: nanosecond_time() / 1000.0, "millisecond": lambda: nanosecond_time() / 1000000.0, "second": now, "minute": lambda: now() / 60.0, "hour": lambda: now() / 3600.0, "day": lambda: now() / 3600.0 / 24.0, "week": lambda: now() / 3600.0 / 24.0 / 7.0, } class LocalAggregator: __slots__ = ("_measurements",) def __init__(self): # type: (...) -> None self._measurements = ( {} ) # type: Dict[Tuple[str, MetricTagsInternal], Tuple[float, float, int, float]] def add( self, ty, # type: MetricType key, # type: str value, # type: float unit, # type: MeasurementUnit tags, # type: MetricTagsInternal ): # type: (...) -> None export_key = "%s:%s@%s" % (ty, key, unit) bucket_key = (export_key, tags) old = self._measurements.get(bucket_key) if old is not None: v_min, v_max, v_count, v_sum = old v_min = min(v_min, value) v_max = max(v_max, value) v_count += 1 v_sum += value else: v_min = v_max = v_sum = value v_count = 1 self._measurements[bucket_key] = (v_min, v_max, v_count, v_sum) def to_json(self): # type: (...) 
-> Dict[str, Any]
        rv = {}  # type: Any
        for (export_key, tags), (
            v_min,
            v_max,
            v_count,
            v_sum,
        ) in self._measurements.items():
            rv.setdefault(export_key, []).append(
                {
                    "tags": _tags_to_dict(tags),
                    "min": v_min,
                    "max": v_max,
                    "count": v_count,
                    "sum": v_sum,
                }
            )
        return rv


class MetricsAggregator:
    ROLLUP_IN_SECONDS = 10.0
    MAX_WEIGHT = 100000
    FLUSHER_SLEEP_TIME = 5.0

    def __init__(
        self,
        capture_func,  # type: Callable[[Envelope], None]
        enable_code_locations=False,  # type: bool
    ):
        # type: (...) -> None
        self.buckets = {}  # type: Dict[int, Any]
        self._enable_code_locations = enable_code_locations
        self._seen_locations = _set()  # type: Set[Tuple[int, MetricMetaKey]]
        self._pending_locations = {}  # type: Dict[int, List[Tuple[MetricMetaKey, Any]]]
        self._buckets_total_weight = 0
        self._capture_func = capture_func
        self._running = True
        self._lock = threading.Lock()

        self._flush_event = threading.Event()  # type: threading.Event
        self._force_flush = False

        # The aggregator shifts its flushing by up to an entire rollup window to
        # avoid multiple clients trampling on the end of a 10 second window as
        # all the buckets are anchored to multiples of ROLLUP seconds. We
        # randomize this number once per aggregator boot to achieve some level
        # of offsetting across a fleet of deployed SDKs. Relay itself will also
        # apply independent jittering.
        self._flush_shift = random.random() * self.ROLLUP_IN_SECONDS

        self._flusher = None  # type: Optional[threading.Thread]
        self._flusher_pid = None  # type: Optional[int]

    def _ensure_thread(self):
        # type: (...) -> bool
        """For forking processes we might need to restart this thread.
        This ensures that our process actually has that thread running.
        """
        if not self._running:
            return False

        pid = os.getpid()
        if self._flusher_pid == pid:
            return True

        with self._lock:
            # Recheck to make sure another thread didn't get here and start
            # the flusher in the meantime
            if self._flusher_pid == pid:
                return True

            self._flusher_pid = pid

            self._flusher = threading.Thread(target=self._flush_loop)
            self._flusher.daemon = True

            try:
                self._flusher.start()
            except RuntimeError:
                # Unfortunately at this point the interpreter is in a state that no
                # longer allows us to spawn a thread and we have to bail.
                self._running = False
                return False

        return True

    def _flush_loop(self):
        # type: (...) -> None
        _in_metrics.set(True)
        while self._running or self._force_flush:
            if self._running:
                self._flush_event.wait(self.FLUSHER_SLEEP_TIME)
            self._flush()

    def _flush(self):
        # type: (...) -> None
        self._emit(self._flushable_buckets(), self._flushable_locations())

    def _flushable_buckets(self):
        # type: (...) -> (Iterable[Tuple[int, Dict[BucketKey, Metric]]])
        with self._lock:
            force_flush = self._force_flush
            cutoff = time.time() - self.ROLLUP_IN_SECONDS - self._flush_shift
            flushable_buckets = ()  # type: Iterable[Tuple[int, Dict[BucketKey, Metric]]]
            weight_to_remove = 0

            if force_flush:
                flushable_buckets = self.buckets.items()
                self.buckets = {}
                self._buckets_total_weight = 0
                self._force_flush = False
            else:
                flushable_buckets = []
                for buckets_timestamp, buckets in self.buckets.items():
                    # If the timestamp of the bucket is newer than the rollup
                    # cutoff, we want to skip it.
                    if buckets_timestamp <= cutoff:
                        flushable_buckets.append((buckets_timestamp, buckets))
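                # Worked example (numbers illustrative): with
                # ROLLUP_IN_SECONDS=10 and a flush shift of 3 seconds, a
                # bucket anchored at t=100 becomes flushable only once
                # time.time() exceeds 113, so late adds within its 10 second
                # window still land in the bucket before it is emitted.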
                # We will clear the elements while holding the lock, in order
                # to avoid having to re-acquire it downstream.
                for buckets_timestamp, buckets in flushable_buckets:
                    for metric in buckets.values():
                        weight_to_remove += metric.weight
                    del self.buckets[buckets_timestamp]

                self._buckets_total_weight -= weight_to_remove

        return flushable_buckets

    def _flushable_locations(self):
        # type: (...) -> Dict[int, List[Tuple[MetricMetaKey, Dict[str, Any]]]]
        with self._lock:
            locations = self._pending_locations
            self._pending_locations = {}

        return locations

    @metrics_noop
    def add(
        self,
        ty,  # type: MetricType
        key,  # type: str
        value,  # type: MetricValue
        unit,  # type: MeasurementUnit
        tags,  # type: Optional[MetricTags]
        timestamp=None,  # type: Optional[Union[float, datetime]]
        local_aggregator=None,  # type: Optional[LocalAggregator]
        stacklevel=0,  # type: Optional[int]
    ):
        # type: (...) -> None
        if not self._ensure_thread() or self._flusher is None:
            return None

        if timestamp is None:
            timestamp = time.time()
        elif isinstance(timestamp, datetime):
            timestamp = to_timestamp(timestamp)

        bucket_timestamp = int(
            (timestamp // self.ROLLUP_IN_SECONDS) * self.ROLLUP_IN_SECONDS
        )

        serialized_tags = _serialize_tags(tags)
        bucket_key = (
            ty,
            key,
            unit,
            serialized_tags,
        )

        with self._lock:
            local_buckets = self.buckets.setdefault(bucket_timestamp, {})
            metric = local_buckets.get(bucket_key)
            if metric is not None:
                previous_weight = metric.weight
                metric.add(value)
            else:
                metric = local_buckets[bucket_key] = METRIC_TYPES[ty](value)
                previous_weight = 0

            added = metric.weight - previous_weight

            if stacklevel is not None:
                self.record_code_location(ty, key, unit, stacklevel + 2, timestamp)

        # Given the new weight we consider whether we want to force flush.
        self._consider_force_flush()

        # For sets, we only record that a value has been added to the set but not which one.
        # See develop docs: https://develop.sentry.dev/sdk/metrics/#sets
        if local_aggregator is not None:
            local_value = float(added if ty == "s" else value)
            local_aggregator.add(ty, key, local_value, unit, serialized_tags)

    def record_code_location(
        self,
        ty,  # type: MetricType
        key,  # type: str
        unit,  # type: MeasurementUnit
        stacklevel,  # type: int
        timestamp=None,  # type: Optional[float]
    ):
        # type: (...) -> None
        if not self._enable_code_locations:
            return
        if timestamp is None:
            timestamp = time.time()
        meta_key = (ty, key, unit)
        start_of_day = datetime.fromtimestamp(timestamp, timezone.utc).replace(
            hour=0, minute=0, second=0, microsecond=0, tzinfo=None
        )
        start_of_day = int(to_timestamp(start_of_day))

        if (start_of_day, meta_key) not in self._seen_locations:
            self._seen_locations.add((start_of_day, meta_key))
            loc = get_code_location(stacklevel + 3)
            if loc is not None:
                # Group metadata by day to make flushing more efficient.
                # There needs to be one envelope item per timestamp.
                self._pending_locations.setdefault(start_of_day, []).append(
                    (meta_key, loc)
                )

    @metrics_noop
    def need_code_location(
        self,
        ty,  # type: MetricType
        key,  # type: str
        unit,  # type: MeasurementUnit
        timestamp,  # type: float
    ):
        # type: (...) -> bool
        # Mirror the guard in record_code_location: with code locations
        # disabled, no location is ever needed.
        if not self._enable_code_locations:
            return False

        meta_key = (ty, key, unit)

        start_of_day = datetime.fromtimestamp(timestamp, timezone.utc).replace(
            hour=0, minute=0, second=0, microsecond=0, tzinfo=None
        )
        start_of_day = int(to_timestamp(start_of_day))

        return (start_of_day, meta_key) not in self._seen_locations

    def kill(self):
        # type: (...) -> None
        if self._flusher is None:
            return

        self._running = False
        self._flush_event.set()
        self._flusher = None

    @metrics_noop
    def flush(self):
        # type: (...) -> None
        self._force_flush = True
        self._flush()

    def _consider_force_flush(self):
        # type: (...)
-> None # It's important to acquire a lock around this method, since it will touch shared data structures. total_weight = len(self.buckets) + self._buckets_total_weight if total_weight >= self.MAX_WEIGHT: self._force_flush = True self._flush_event.set() def _emit( self, flushable_buckets, # type: (Iterable[Tuple[int, Dict[BucketKey, Metric]]]) code_locations, # type: Dict[int, List[Tuple[MetricMetaKey, Dict[str, Any]]]] ): # type: (...) -> Optional[Envelope] envelope = Envelope() if flushable_buckets: encoded_metrics = _encode_metrics(flushable_buckets) envelope.add_item(Item(payload=encoded_metrics, type="statsd")) for timestamp, locations in code_locations.items(): encoded_locations = _encode_locations(timestamp, locations) envelope.add_item(Item(payload=encoded_locations, type="metric_meta")) if envelope.items: self._capture_func(envelope) return envelope return None def _serialize_tags( tags, # type: Optional[MetricTags] ): # type: (...) -> MetricTagsInternal if not tags: return () rv = [] for key, value in tags.items(): # If the value is a collection, we want to flatten it. if isinstance(value, (list, tuple)): for inner_value in value: if inner_value is not None: rv.append((key, str(inner_value))) elif value is not None: rv.append((key, str(value))) # It's very important to sort the tags in order to obtain the # same bucket key. return tuple(sorted(rv)) def _tags_to_dict(tags): # type: (MetricTagsInternal) -> Dict[str, Any] rv = {} # type: Dict[str, Any] for tag_name, tag_value in tags: old_value = rv.get(tag_name) if old_value is not None: if isinstance(old_value, list): old_value.append(tag_value) else: rv[tag_name] = [old_value, tag_value] else: rv[tag_name] = tag_value return rv def _get_aggregator(): # type: () -> Optional[MetricsAggregator] client = sentry_sdk.get_client() return ( client.metrics_aggregator if client.is_active() and client.metrics_aggregator is not None else None ) def _get_aggregator_and_update_tags(key, value, unit, tags): # type: (str, Optional[MetricValue], MeasurementUnit, Optional[MetricTags]) -> Tuple[Optional[MetricsAggregator], Optional[LocalAggregator], Optional[MetricTags]] client = sentry_sdk.get_client() if not client.is_active() or client.metrics_aggregator is None: return None, None, tags updated_tags = dict(tags or ()) # type: Dict[str, MetricTagValue] updated_tags.setdefault("release", client.options["release"]) updated_tags.setdefault("environment", client.options["environment"]) scope = sentry_sdk.get_current_scope() local_aggregator = None # We go with the low-level API here to access transaction information as # this one is the same between just errors and errors + performance transaction_source = scope._transaction_info.get("source") if transaction_source in GOOD_TRANSACTION_SOURCES: transaction_name = scope._transaction if transaction_name: updated_tags.setdefault("transaction", transaction_name) if scope._span is not None: local_aggregator = scope._span._get_local_aggregator() experiments = client.options.get("_experiments", {}) before_emit_callback = experiments.get("before_emit_metric") if before_emit_callback is not None: with recursion_protection() as in_metrics: if not in_metrics: if not before_emit_callback(key, value, unit, updated_tags): return None, None, updated_tags return client.metrics_aggregator, local_aggregator, updated_tags def increment( key, # type: str value=1.0, # type: float unit="none", # type: MeasurementUnit tags=None, # type: Optional[MetricTags] timestamp=None, # type: Optional[Union[float, datetime]] 
stacklevel=0,  # type: int
):
    # type: (...) -> None
    """Increments a counter."""
    aggregator, local_aggregator, tags = _get_aggregator_and_update_tags(
        key, value, unit, tags
    )
    if aggregator is not None:
        aggregator.add(
            "c", key, value, unit, tags, timestamp, local_aggregator, stacklevel
        )


# alias as incr is relatively common in python
incr = increment


class _Timing:
    def __init__(
        self,
        key,  # type: str
        tags,  # type: Optional[MetricTags]
        timestamp,  # type: Optional[Union[float, datetime]]
        value,  # type: Optional[float]
        unit,  # type: DurationUnit
        stacklevel,  # type: int
    ):
        # type: (...) -> None
        self.key = key
        self.tags = tags
        self.timestamp = timestamp
        self.value = value
        self.unit = unit
        self.entered = None  # type: Optional[float]
        self._span = None  # type: Optional[sentry_sdk.tracing.Span]
        self.stacklevel = stacklevel

    def _validate_invocation(self, context):
        # type: (str) -> None
        if self.value is not None:
            raise TypeError(
                "cannot use timing as %s when a value is provided" % context
            )

    def __enter__(self):
        # type: (...) -> _Timing
        self.entered = TIMING_FUNCTIONS[self.unit]()
        self._validate_invocation("context-manager")
        self._span = sentry_sdk.start_span(op="metric.timing", name=self.key)
        if self.tags:
            for key, value in self.tags.items():
                if isinstance(value, (tuple, list)):
                    value = ",".join(sorted(map(str, value)))
                self._span.set_tag(key, value)
        self._span.__enter__()

        # report code locations here for better accuracy
        aggregator = _get_aggregator()
        if aggregator is not None:
            aggregator.record_code_location("d", self.key, self.unit, self.stacklevel)

        return self

    def __exit__(self, exc_type, exc_value, tb):
        # type: (Any, Any, Any) -> None
        assert self._span, "did not enter"
        aggregator, local_aggregator, tags = _get_aggregator_and_update_tags(
            self.key,
            self.value,
            self.unit,
            self.tags,
        )
        if aggregator is not None:
            elapsed = TIMING_FUNCTIONS[self.unit]() - self.entered  # type: ignore
            aggregator.add(
                "d",
                self.key,
                elapsed,
                self.unit,
                tags,
                self.timestamp,
                local_aggregator,
                None,  # code locations are reported in __enter__
            )

        self._span.__exit__(exc_type, exc_value, tb)
        self._span = None

    def __call__(self, f):
        # type: (Any) -> Any
        self._validate_invocation("decorator")

        @wraps(f)
        def timed_func(*args, **kwargs):
            # type: (*Any, **Any) -> Any
            with timing(
                key=self.key,
                tags=self.tags,
                timestamp=self.timestamp,
                unit=self.unit,
                stacklevel=self.stacklevel + 1,
            ):
                return f(*args, **kwargs)

        return timed_func


def timing(
    key,  # type: str
    value=None,  # type: Optional[float]
    unit="second",  # type: DurationUnit
    tags=None,  # type: Optional[MetricTags]
    timestamp=None,  # type: Optional[Union[float, datetime]]
    stacklevel=0,  # type: int
):
    # type: (...) -> _Timing
    """Emits a distribution with the time it takes to run the given code block.

    This method supports three forms of invocation:

    - when a `value` is provided, it functions similarly to `distribution`,
      but with a duration unit (`second` by default)
    - it can be used as a context manager
    - it can be used as a decorator
    """
    if value is not None:
        aggregator, local_aggregator, tags = _get_aggregator_and_update_tags(
            key, value, unit, tags
        )
        if aggregator is not None:
            aggregator.add(
                "d", key, value, unit, tags, timestamp, local_aggregator, stacklevel
            )

    return _Timing(key, tags, timestamp, value, unit, stacklevel)


def distribution(
    key,  # type: str
    value,  # type: float
    unit="none",  # type: MeasurementUnit
    tags=None,  # type: Optional[MetricTags]
    timestamp=None,  # type: Optional[Union[float, datetime]]
    stacklevel=0,  # type: int
):
    # type: (...)
-> None """Emits a distribution.""" aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( key, value, unit, tags ) if aggregator is not None: aggregator.add( "d", key, value, unit, tags, timestamp, local_aggregator, stacklevel ) def set( key, # type: str value, # type: Union[int, str] unit="none", # type: MeasurementUnit tags=None, # type: Optional[MetricTags] timestamp=None, # type: Optional[Union[float, datetime]] stacklevel=0, # type: int ): # type: (...) -> None """Emits a set.""" aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( key, value, unit, tags ) if aggregator is not None: aggregator.add( "s", key, value, unit, tags, timestamp, local_aggregator, stacklevel ) def gauge( key, # type: str value, # type: float unit="none", # type: MeasurementUnit tags=None, # type: Optional[MetricTags] timestamp=None, # type: Optional[Union[float, datetime]] stacklevel=0, # type: int ): # type: (...) -> None """Emits a gauge.""" aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( key, value, unit, tags ) if aggregator is not None: aggregator.add( "g", key, value, unit, tags, timestamp, local_aggregator, stacklevel ) sentry-python-2.18.0/sentry_sdk/monitor.py000066400000000000000000000071761471214654000207120ustar00rootroot00000000000000import os import time from threading import Thread, Lock import sentry_sdk from sentry_sdk.utils import logger from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Optional MAX_DOWNSAMPLE_FACTOR = 10 class Monitor: """ Performs health checks in a separate thread once every interval seconds and updates the internal state. Other parts of the SDK only read this state and act accordingly. """ name = "sentry.monitor" def __init__(self, transport, interval=10): # type: (sentry_sdk.transport.Transport, float) -> None self.transport = transport # type: sentry_sdk.transport.Transport self.interval = interval # type: float self._healthy = True self._downsample_factor = 0 # type: int self._thread = None # type: Optional[Thread] self._thread_lock = Lock() self._thread_for_pid = None # type: Optional[int] self._running = True def _ensure_running(self): # type: () -> None """ Check that the monitor has an active thread to run in, or create one if not. Note that this might fail (e.g. in Python 3.12 it's not possible to spawn new threads at interpreter shutdown). In that case self._running will be False after running this function. """ if self._thread_for_pid == os.getpid() and self._thread is not None: return None with self._thread_lock: if self._thread_for_pid == os.getpid() and self._thread is not None: return None def _thread(): # type: (...) -> None while self._running: time.sleep(self.interval) if self._running: self.run() thread = Thread(name=self.name, target=_thread) thread.daemon = True try: thread.start() except RuntimeError: # Unfortunately at this point the interpreter is in a state that no # longer allows us to spawn a thread and we have to bail. 
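                # After bailing here, is_healthy() and downsample_factor keep
                # returning the last recorded values, so callers degrade
                # gracefully instead of raising.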
self._running = False return None self._thread = thread self._thread_for_pid = os.getpid() return None def run(self): # type: () -> None self.check_health() self.set_downsample_factor() def set_downsample_factor(self): # type: () -> None if self._healthy: if self._downsample_factor > 0: logger.debug( "[Monitor] health check positive, reverting to normal sampling" ) self._downsample_factor = 0 else: if self.downsample_factor < MAX_DOWNSAMPLE_FACTOR: self._downsample_factor += 1 logger.debug( "[Monitor] health check negative, downsampling with a factor of %d", self._downsample_factor, ) def check_health(self): # type: () -> None """ Perform the actual health checks, currently only checks if the transport is rate-limited. TODO: augment in the future with more checks. """ self._healthy = self.transport.is_healthy() def is_healthy(self): # type: () -> bool self._ensure_running() return self._healthy @property def downsample_factor(self): # type: () -> int self._ensure_running() return self._downsample_factor def kill(self): # type: () -> None self._running = False def __del__(self): # type: () -> None self.kill() sentry-python-2.18.0/sentry_sdk/profiler/000077500000000000000000000000001471214654000204605ustar00rootroot00000000000000sentry-python-2.18.0/sentry_sdk/profiler/__init__.py000066400000000000000000000020471471214654000225740ustar00rootroot00000000000000from sentry_sdk.profiler.continuous_profiler import start_profiler, stop_profiler from sentry_sdk.profiler.transaction_profiler import ( MAX_PROFILE_DURATION_NS, PROFILE_MINIMUM_SAMPLES, Profile, Scheduler, ThreadScheduler, GeventScheduler, has_profiling_enabled, setup_profiler, teardown_profiler, ) from sentry_sdk.profiler.utils import ( DEFAULT_SAMPLING_FREQUENCY, MAX_STACK_DEPTH, get_frame_name, extract_frame, extract_stack, frame_id, ) __all__ = [ "start_profiler", "stop_profiler", # DEPRECATED: The following was re-exported for backwards compatibility. It # will be removed from sentry_sdk.profiler in a future release. 
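    # For new code, prefer importing from the submodules directly (sketch,
    # based on the imports at the top of this file):
    #
    #     from sentry_sdk.profiler.transaction_profiler import Profile
    #     from sentry_sdk.profiler.utils import extract_stack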
"MAX_PROFILE_DURATION_NS", "PROFILE_MINIMUM_SAMPLES", "Profile", "Scheduler", "ThreadScheduler", "GeventScheduler", "has_profiling_enabled", "setup_profiler", "teardown_profiler", "DEFAULT_SAMPLING_FREQUENCY", "MAX_STACK_DEPTH", "get_frame_name", "extract_frame", "extract_stack", "frame_id", ] sentry-python-2.18.0/sentry_sdk/profiler/continuous_profiler.py000066400000000000000000000414031471214654000251440ustar00rootroot00000000000000import atexit import os import sys import threading import time import uuid from datetime import datetime, timezone from sentry_sdk.consts import VERSION from sentry_sdk.envelope import Envelope from sentry_sdk._lru_cache import LRUCache from sentry_sdk.profiler.utils import ( DEFAULT_SAMPLING_FREQUENCY, extract_stack, ) from sentry_sdk.utils import ( capture_internal_exception, is_gevent, logger, now, set_in_app_in_frames, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Callable from typing import Dict from typing import List from typing import Optional from typing import Type from typing import Union from typing_extensions import TypedDict from sentry_sdk._types import ContinuousProfilerMode, SDKInfo from sentry_sdk.profiler.utils import ( ExtractedSample, FrameId, StackId, ThreadId, ProcessedFrame, ProcessedStack, ) ProcessedSample = TypedDict( "ProcessedSample", { "timestamp": float, "thread_id": ThreadId, "stack_id": int, }, ) try: from gevent.monkey import get_original from gevent.threadpool import ThreadPool as _ThreadPool ThreadPool = _ThreadPool # type: Optional[Type[_ThreadPool]] thread_sleep = get_original("time", "sleep") except ImportError: thread_sleep = time.sleep ThreadPool = None _scheduler = None # type: Optional[ContinuousScheduler] def setup_continuous_profiler(options, sdk_info, capture_func): # type: (Dict[str, Any], SDKInfo, Callable[[Envelope], None]) -> bool global _scheduler if _scheduler is not None: logger.debug("[Profiling] Continuous Profiler is already setup") return False if is_gevent(): # If gevent has patched the threading modules then we cannot rely on # them to spawn a native thread for sampling. # Instead we default to the GeventContinuousScheduler which is capable of # spawning native threads within gevent. default_profiler_mode = GeventContinuousScheduler.mode else: default_profiler_mode = ThreadContinuousScheduler.mode experiments = options.get("_experiments", {}) profiler_mode = ( experiments.get("continuous_profiling_mode") or default_profiler_mode ) frequency = DEFAULT_SAMPLING_FREQUENCY if profiler_mode == ThreadContinuousScheduler.mode: _scheduler = ThreadContinuousScheduler( frequency, options, sdk_info, capture_func ) elif profiler_mode == GeventContinuousScheduler.mode: _scheduler = GeventContinuousScheduler( frequency, options, sdk_info, capture_func ) else: raise ValueError("Unknown continuous profiler mode: {}".format(profiler_mode)) logger.debug( "[Profiling] Setting up continuous profiler in {mode} mode".format( mode=_scheduler.mode ) ) atexit.register(teardown_continuous_profiler) return True def try_autostart_continuous_profiler(): # type: () -> None if _scheduler is None: return # Ensure that the scheduler only autostarts once per process. # This is necessary because many web servers use forks to spawn # additional processes. And the profiler is only spawned on the # master process, then it often only profiles the main process # and not the ones where the requests are being handled. # # Additionally, we only want this autostart behaviour once per # process. 
If the user explicitly calls `stop_profiler`, it should # be respected and not start the profiler again. if not _scheduler.should_autostart(): return _scheduler.ensure_running() def start_profiler(): # type: () -> None if _scheduler is None: return _scheduler.ensure_running() def stop_profiler(): # type: () -> None if _scheduler is None: return _scheduler.teardown() def teardown_continuous_profiler(): # type: () -> None stop_profiler() global _scheduler _scheduler = None def get_profiler_id(): # type: () -> Union[str, None] if _scheduler is None: return None return _scheduler.profiler_id class ContinuousScheduler: mode = "unknown" # type: ContinuousProfilerMode def __init__(self, frequency, options, sdk_info, capture_func): # type: (int, Dict[str, Any], SDKInfo, Callable[[Envelope], None]) -> None self.interval = 1.0 / frequency self.options = options self.sdk_info = sdk_info self.capture_func = capture_func self.sampler = self.make_sampler() self.buffer = None # type: Optional[ProfileBuffer] self.running = False def should_autostart(self): # type: () -> bool experiments = self.options.get("_experiments") if not experiments: return False return experiments.get("continuous_profiling_auto_start") def ensure_running(self): # type: () -> None raise NotImplementedError def teardown(self): # type: () -> None raise NotImplementedError def pause(self): # type: () -> None raise NotImplementedError def reset_buffer(self): # type: () -> None self.buffer = ProfileBuffer( self.options, self.sdk_info, PROFILE_BUFFER_SECONDS, self.capture_func ) @property def profiler_id(self): # type: () -> Union[str, None] if self.buffer is None: return None return self.buffer.profiler_id def make_sampler(self): # type: () -> Callable[..., None] cwd = os.getcwd() cache = LRUCache(max_size=256) def _sample_stack(*args, **kwargs): # type: (*Any, **Any) -> None """ Take a sample of the stack on all the threads in the process. This should be called at a regular interval to collect samples. """ ts = now() try: sample = [ (str(tid), extract_stack(frame, cache, cwd)) for tid, frame in sys._current_frames().items() ] except AttributeError: # For some reason, the frame we get doesn't have certain attributes. # When this happens, we abandon the current sample as it's bad. capture_internal_exception(sys.exc_info()) return if self.buffer is not None: self.buffer.write(ts, sample) return _sample_stack def run(self): # type: () -> None last = time.perf_counter() while self.running: self.sampler() # some time may have elapsed since the last time # we sampled, so we need to account for that and # not sleep for too long elapsed = time.perf_counter() - last if elapsed < self.interval: thread_sleep(self.interval - elapsed) # after sleeping, make sure to take the current # timestamp so we can use it next iteration last = time.perf_counter() if self.buffer is not None: self.buffer.flush() class ThreadContinuousScheduler(ContinuousScheduler): """ This scheduler is based on running a daemon thread that will call the sampler at a regular interval. 
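    Because the sampling thread is created with daemon=True, it does not keep
    the interpreter alive at shutdown.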
""" mode = "thread" # type: ContinuousProfilerMode name = "sentry.profiler.ThreadContinuousScheduler" def __init__(self, frequency, options, sdk_info, capture_func): # type: (int, Dict[str, Any], SDKInfo, Callable[[Envelope], None]) -> None super().__init__(frequency, options, sdk_info, capture_func) self.thread = None # type: Optional[threading.Thread] self.pid = None # type: Optional[int] self.lock = threading.Lock() def should_autostart(self): # type: () -> bool return super().should_autostart() and self.pid != os.getpid() def ensure_running(self): # type: () -> None pid = os.getpid() # is running on the right process if self.running and self.pid == pid: return with self.lock: # another thread may have tried to acquire the lock # at the same time so it may start another thread # make sure to check again before proceeding if self.running and self.pid == pid: return self.pid = pid self.running = True # if the profiler thread is changing, # we should create a new buffer along with it self.reset_buffer() # make sure the thread is a daemon here otherwise this # can keep the application running after other threads # have exited self.thread = threading.Thread(name=self.name, target=self.run, daemon=True) try: self.thread.start() except RuntimeError: # Unfortunately at this point the interpreter is in a state that no # longer allows us to spawn a thread and we have to bail. self.running = False self.thread = None def teardown(self): # type: () -> None if self.running: self.running = False if self.thread is not None: self.thread.join() self.thread = None self.buffer = None class GeventContinuousScheduler(ContinuousScheduler): """ This scheduler is based on the thread scheduler but adapted to work with gevent. When using gevent, it may monkey patch the threading modules (`threading` and `_thread`). This results in the use of greenlets instead of native threads. This is an issue because the sampler CANNOT run in a greenlet because 1. Other greenlets doing sync work will prevent the sampler from running 2. The greenlet runs in the same thread as other greenlets so when taking a sample, other greenlets will have been evicted from the thread. This results in a sample containing only the sampler's code. """ mode = "gevent" # type: ContinuousProfilerMode def __init__(self, frequency, options, sdk_info, capture_func): # type: (int, Dict[str, Any], SDKInfo, Callable[[Envelope], None]) -> None if ThreadPool is None: raise ValueError("Profiler mode: {} is not available".format(self.mode)) super().__init__(frequency, options, sdk_info, capture_func) self.thread = None # type: Optional[_ThreadPool] self.pid = None # type: Optional[int] self.lock = threading.Lock() def should_autostart(self): # type: () -> bool return super().should_autostart() and self.pid != os.getpid() def ensure_running(self): # type: () -> None pid = os.getpid() # is running on the right process if self.running and self.pid == pid: return with self.lock: # another thread may have tried to acquire the lock # at the same time so it may start another thread # make sure to check again before proceeding if self.running and self.pid == pid: return self.pid = pid self.running = True # if the profiler thread is changing, # we should create a new buffer along with it self.reset_buffer() self.thread = ThreadPool(1) # type: ignore[misc] try: self.thread.spawn(self.run) except RuntimeError: # Unfortunately at this point the interpreter is in a state that no # longer allows us to spawn a thread and we have to bail. 
self.running = False self.thread = None return def teardown(self): # type: () -> None if self.running: self.running = False if self.thread is not None: self.thread.join() self.thread = None self.buffer = None PROFILE_BUFFER_SECONDS = 10 class ProfileBuffer: def __init__(self, options, sdk_info, buffer_size, capture_func): # type: (Dict[str, Any], SDKInfo, int, Callable[[Envelope], None]) -> None self.options = options self.sdk_info = sdk_info self.buffer_size = buffer_size self.capture_func = capture_func self.profiler_id = uuid.uuid4().hex self.chunk = ProfileChunk() # Make sure to use the same clock to compute a sample's monotonic timestamp # to ensure the timestamps are correctly aligned. self.start_monotonic_time = now() # Make sure the start timestamp is defined only once per profiler id. # This prevents issues with clock drift within a single profiler session. # # Subtracting the start_monotonic_time here to find a fixed starting position # for relative monotonic timestamps for each sample. self.start_timestamp = ( datetime.now(timezone.utc).timestamp() - self.start_monotonic_time ) def write(self, monotonic_time, sample): # type: (float, ExtractedSample) -> None if self.should_flush(monotonic_time): self.flush() self.chunk = ProfileChunk() self.start_monotonic_time = now() self.chunk.write(self.start_timestamp + monotonic_time, sample) def should_flush(self, monotonic_time): # type: (float) -> bool # If the delta between the new monotonic time and the start monotonic time # exceeds the buffer size, it means we should flush the chunk return monotonic_time - self.start_monotonic_time >= self.buffer_size def flush(self): # type: () -> None chunk = self.chunk.to_json(self.profiler_id, self.options, self.sdk_info) envelope = Envelope() envelope.add_profile_chunk(chunk) self.capture_func(envelope) class ProfileChunk: def __init__(self): # type: () -> None self.chunk_id = uuid.uuid4().hex self.indexed_frames = {} # type: Dict[FrameId, int] self.indexed_stacks = {} # type: Dict[StackId, int] self.frames = [] # type: List[ProcessedFrame] self.stacks = [] # type: List[ProcessedStack] self.samples = [] # type: List[ProcessedSample] def write(self, ts, sample): # type: (float, ExtractedSample) -> None for tid, (stack_id, frame_ids, frames) in sample: try: # Check if the stack is indexed first, this lets us skip # indexing frames if it's not necessary if stack_id not in self.indexed_stacks: for i, frame_id in enumerate(frame_ids): if frame_id not in self.indexed_frames: self.indexed_frames[frame_id] = len(self.indexed_frames) self.frames.append(frames[i]) self.indexed_stacks[stack_id] = len(self.indexed_stacks) self.stacks.append( [self.indexed_frames[frame_id] for frame_id in frame_ids] ) self.samples.append( { "timestamp": ts, "thread_id": tid, "stack_id": self.indexed_stacks[stack_id], } ) except AttributeError: # For some reason, the frame we get doesn't have certain attributes. # When this happens, we abandon the current sample as it's bad. 
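                # Only this thread's entry is dropped; samples already
                # recorded for other threads in this tick are kept, so one bad
                # frame does not invalidate the whole chunk.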
capture_internal_exception(sys.exc_info()) def to_json(self, profiler_id, options, sdk_info): # type: (str, Dict[str, Any], SDKInfo) -> Dict[str, Any] profile = { "frames": self.frames, "stacks": self.stacks, "samples": self.samples, "thread_metadata": { str(thread.ident): { "name": str(thread.name), } for thread in threading.enumerate() }, } set_in_app_in_frames( profile["frames"], options["in_app_exclude"], options["in_app_include"], options["project_root"], ) payload = { "chunk_id": self.chunk_id, "client_sdk": { "name": sdk_info["name"], "version": VERSION, }, "platform": "python", "profile": profile, "profiler_id": profiler_id, "version": "2", } for key in "release", "environment", "dist": if options[key] is not None: payload[key] = str(options[key]).strip() return payload sentry-python-2.18.0/sentry_sdk/profiler/transaction_profiler.py000066400000000000000000000663441471214654000252760ustar00rootroot00000000000000""" This file is originally based on code from https://github.com/nylas/nylas-perftools, which is published under the following license: The MIT License (MIT) Copyright (c) 2014 Nylas Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
""" import atexit import os import platform import random import sys import threading import time import uuid import warnings from abc import ABC, abstractmethod from collections import deque import sentry_sdk from sentry_sdk._lru_cache import LRUCache from sentry_sdk.profiler.utils import ( DEFAULT_SAMPLING_FREQUENCY, extract_stack, ) from sentry_sdk.utils import ( capture_internal_exception, get_current_thread_meta, is_gevent, is_valid_sample_rate, logger, nanosecond_time, set_in_app_in_frames, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Callable from typing import Deque from typing import Dict from typing import List from typing import Optional from typing import Set from typing import Type from typing_extensions import TypedDict from sentry_sdk.profiler.utils import ( ProcessedStack, ProcessedFrame, ProcessedThreadMetadata, FrameId, StackId, ThreadId, ExtractedSample, ) from sentry_sdk._types import Event, SamplingContext, ProfilerMode ProcessedSample = TypedDict( "ProcessedSample", { "elapsed_since_start_ns": str, "thread_id": ThreadId, "stack_id": int, }, ) ProcessedProfile = TypedDict( "ProcessedProfile", { "frames": List[ProcessedFrame], "stacks": List[ProcessedStack], "samples": List[ProcessedSample], "thread_metadata": Dict[ThreadId, ProcessedThreadMetadata], }, ) try: from gevent.monkey import get_original from gevent.threadpool import ThreadPool as _ThreadPool ThreadPool = _ThreadPool # type: Optional[Type[_ThreadPool]] thread_sleep = get_original("time", "sleep") except ImportError: thread_sleep = time.sleep ThreadPool = None _scheduler = None # type: Optional[Scheduler] # The minimum number of unique samples that must exist in a profile to be # considered valid. PROFILE_MINIMUM_SAMPLES = 2 def has_profiling_enabled(options): # type: (Dict[str, Any]) -> bool profiles_sampler = options["profiles_sampler"] if profiles_sampler is not None: return True profiles_sample_rate = options["profiles_sample_rate"] if profiles_sample_rate is not None and profiles_sample_rate > 0: return True profiles_sample_rate = options["_experiments"].get("profiles_sample_rate") if profiles_sample_rate is not None: logger.warning( "_experiments['profiles_sample_rate'] is deprecated. " "Please use the non-experimental profiles_sample_rate option " "directly." ) if profiles_sample_rate > 0: return True return False def setup_profiler(options): # type: (Dict[str, Any]) -> bool global _scheduler if _scheduler is not None: logger.debug("[Profiling] Profiler is already setup") return False frequency = DEFAULT_SAMPLING_FREQUENCY if is_gevent(): # If gevent has patched the threading modules then we cannot rely on # them to spawn a native thread for sampling. # Instead we default to the GeventScheduler which is capable of # spawning native threads within gevent. default_profiler_mode = GeventScheduler.mode else: default_profiler_mode = ThreadScheduler.mode if options.get("profiler_mode") is not None: profiler_mode = options["profiler_mode"] else: profiler_mode = options.get("_experiments", {}).get("profiler_mode") if profiler_mode is not None: logger.warning( "_experiments['profiler_mode'] is deprecated. Please use the " "non-experimental profiler_mode option directly." 
) profiler_mode = profiler_mode or default_profiler_mode if ( profiler_mode == ThreadScheduler.mode # for legacy reasons, we'll keep supporting sleep mode for this scheduler or profiler_mode == "sleep" ): _scheduler = ThreadScheduler(frequency=frequency) elif profiler_mode == GeventScheduler.mode: _scheduler = GeventScheduler(frequency=frequency) else: raise ValueError("Unknown profiler mode: {}".format(profiler_mode)) logger.debug( "[Profiling] Setting up profiler in {mode} mode".format(mode=_scheduler.mode) ) _scheduler.setup() atexit.register(teardown_profiler) return True def teardown_profiler(): # type: () -> None global _scheduler if _scheduler is not None: _scheduler.teardown() _scheduler = None MAX_PROFILE_DURATION_NS = int(3e10) # 30 seconds class Profile: def __init__( self, sampled, # type: Optional[bool] start_ns, # type: int hub=None, # type: Optional[sentry_sdk.Hub] scheduler=None, # type: Optional[Scheduler] ): # type: (...) -> None self.scheduler = _scheduler if scheduler is None else scheduler self.event_id = uuid.uuid4().hex # type: str self.sampled = sampled # type: Optional[bool] # Various framework integrations are capable of overwriting the active thread id. # If it is set to `None` at the end of the profile, we fall back to the default. self._default_active_thread_id = get_current_thread_meta()[0] or 0 # type: int self.active_thread_id = None # type: Optional[int] try: self.start_ns = start_ns # type: int except AttributeError: self.start_ns = 0 self.stop_ns = 0 # type: int self.active = False # type: bool self.indexed_frames = {} # type: Dict[FrameId, int] self.indexed_stacks = {} # type: Dict[StackId, int] self.frames = [] # type: List[ProcessedFrame] self.stacks = [] # type: List[ProcessedStack] self.samples = [] # type: List[ProcessedSample] self.unique_samples = 0 # Backwards compatibility with the old hub property self._hub = None # type: Optional[sentry_sdk.Hub] if hub is not None: self._hub = hub warnings.warn( "The `hub` parameter is deprecated. Please do not use it.", DeprecationWarning, stacklevel=2, ) def update_active_thread_id(self): # type: () -> None self.active_thread_id = get_current_thread_meta()[0] logger.debug( "[Profiling] updating active thread id to {tid}".format( tid=self.active_thread_id ) ) def _set_initial_sampling_decision(self, sampling_context): # type: (SamplingContext) -> None """ Sets the profile's sampling decision according to the following precedence rules: 1. If the transaction to be profiled is not sampled, that decision will be used, regardless of anything else. 2. Use `profiles_sample_rate` to decide. """ # The corresponding transaction was not sampled, # so don't generate a profile for it. if not self.sampled: logger.debug( "[Profiling] Discarding profile because transaction is discarded." ) self.sampled = False return # The profiler hasn't been properly initialized. if self.scheduler is None: logger.debug( "[Profiling] Discarding profile because profiler was not started." ) self.sampled = False return client = sentry_sdk.get_client() if not client.is_active(): self.sampled = False return options = client.options if callable(options.get("profiles_sampler")): sample_rate = options["profiles_sampler"](sampling_context) elif options["profiles_sample_rate"] is not None: sample_rate = options["profiles_sample_rate"] else: sample_rate = options["_experiments"].get("profiles_sample_rate") # The profiles_sample_rate option was not set, so profiling # was never enabled. 
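# For context, a hypothetical init call that enables profiling (the DSN
# below is a placeholder), in which case `sample_rate` here is a float:
#
#     sentry_sdk.init(
#         dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
#         traces_sample_rate=1.0,
#         profiles_sample_rate=0.1,
#     )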
if sample_rate is None: logger.debug( "[Profiling] Discarding profile because profiling was not enabled." ) self.sampled = False return if not is_valid_sample_rate(sample_rate, source="Profiling"): logger.warning( "[Profiling] Discarding profile because of invalid sample rate." ) self.sampled = False return # Now we roll the dice. random.random is inclusive of 0, but not of 1, # so strict < is safe here. In case sample_rate is a boolean, cast it # to a float (True becomes 1.0 and False becomes 0.0) self.sampled = random.random() < float(sample_rate) if self.sampled: logger.debug("[Profiling] Initializing profile") else: logger.debug( "[Profiling] Discarding profile because it's not included in the random sample (sample rate = {sample_rate})".format( sample_rate=float(sample_rate) ) ) def start(self): # type: () -> None if not self.sampled or self.active: return assert self.scheduler, "No scheduler specified" logger.debug("[Profiling] Starting profile") self.active = True if not self.start_ns: self.start_ns = nanosecond_time() self.scheduler.start_profiling(self) def stop(self): # type: () -> None if not self.sampled or not self.active: return assert self.scheduler, "No scheduler specified" logger.debug("[Profiling] Stopping profile") self.active = False self.stop_ns = nanosecond_time() def __enter__(self): # type: () -> Profile scope = sentry_sdk.get_isolation_scope() old_profile = scope.profile scope.profile = self self._context_manager_state = (scope, old_profile) self.start() return self def __exit__(self, ty, value, tb): # type: (Optional[Any], Optional[Any], Optional[Any]) -> None self.stop() scope, old_profile = self._context_manager_state del self._context_manager_state scope.profile = old_profile def write(self, ts, sample): # type: (int, ExtractedSample) -> None if not self.active: return if ts < self.start_ns: return offset = ts - self.start_ns if offset > MAX_PROFILE_DURATION_NS: self.stop() return self.unique_samples += 1 elapsed_since_start_ns = str(offset) for tid, (stack_id, frame_ids, frames) in sample: try: # Check if the stack is indexed first, this lets us skip # indexing frames if it's not necessary if stack_id not in self.indexed_stacks: for i, frame_id in enumerate(frame_ids): if frame_id not in self.indexed_frames: self.indexed_frames[frame_id] = len(self.indexed_frames) self.frames.append(frames[i]) self.indexed_stacks[stack_id] = len(self.indexed_stacks) self.stacks.append( [self.indexed_frames[frame_id] for frame_id in frame_ids] ) self.samples.append( { "elapsed_since_start_ns": elapsed_since_start_ns, "thread_id": tid, "stack_id": self.indexed_stacks[stack_id], } ) except AttributeError: # For some reason, the frame we get doesn't have certain attributes. # When this happens, we abandon the current sample as it's bad. capture_internal_exception(sys.exc_info()) def process(self): # type: () -> ProcessedProfile # This collects the thread metadata at the end of a profile. Doing it # this way means that any threads that terminate before the profile ends # will not have any metadata associated with it. 
thread_metadata = { str(thread.ident): { "name": str(thread.name), } for thread in threading.enumerate() } # type: Dict[str, ProcessedThreadMetadata] return { "frames": self.frames, "stacks": self.stacks, "samples": self.samples, "thread_metadata": thread_metadata, } def to_json(self, event_opt, options): # type: (Event, Dict[str, Any]) -> Dict[str, Any] profile = self.process() set_in_app_in_frames( profile["frames"], options["in_app_exclude"], options["in_app_include"], options["project_root"], ) return { "environment": event_opt.get("environment"), "event_id": self.event_id, "platform": "python", "profile": profile, "release": event_opt.get("release", ""), "timestamp": event_opt["start_timestamp"], "version": "1", "device": { "architecture": platform.machine(), }, "os": { "name": platform.system(), "version": platform.release(), }, "runtime": { "name": platform.python_implementation(), "version": platform.python_version(), }, "transactions": [ { "id": event_opt["event_id"], "name": event_opt["transaction"], # we start the transaction before the profile and this is # the transaction start time relative to the profile, so we # hardcode it to 0 until we can start the profile before the transaction "relative_start_ns": "0", # use the duration of the profile instead of the transaction # because we end the transaction after the profile "relative_end_ns": str(self.stop_ns - self.start_ns), "trace_id": event_opt["contexts"]["trace"]["trace_id"], "active_thread_id": str( self._default_active_thread_id if self.active_thread_id is None else self.active_thread_id ), } ], } def valid(self): # type: () -> bool client = sentry_sdk.get_client() if not client.is_active(): return False if not has_profiling_enabled(client.options): return False if self.sampled is None or not self.sampled: if client.transport: client.transport.record_lost_event( "sample_rate", data_category="profile" ) return False if self.unique_samples < PROFILE_MINIMUM_SAMPLES: if client.transport: client.transport.record_lost_event( "insufficient_data", data_category="profile" ) logger.debug("[Profiling] Discarding profile because of insufficient samples.") return False return True @property def hub(self): # type: () -> Optional[sentry_sdk.Hub] warnings.warn( "The `hub` attribute is deprecated. Please do not access it.", DeprecationWarning, stacklevel=2, ) return self._hub @hub.setter def hub(self, value): # type: (Optional[sentry_sdk.Hub]) -> None warnings.warn( "The `hub` attribute is deprecated. Please do not set it.", DeprecationWarning, stacklevel=2, ) self._hub = value class Scheduler(ABC): mode = "unknown" # type: ProfilerMode def __init__(self, frequency): # type: (int) -> None self.interval = 1.0 / frequency self.sampler = self.make_sampler() # cap the number of new profiles at any time so it does not grow infinitely self.new_profiles = deque(maxlen=128) # type: Deque[Profile] self.active_profiles = set() # type: Set[Profile] def __enter__(self): # type: () -> Scheduler self.setup() return self def __exit__(self, ty, value, tb): # type: (Optional[Any], Optional[Any], Optional[Any]) -> None self.teardown() @abstractmethod def setup(self): # type: () -> None pass @abstractmethod def teardown(self): # type: () -> None pass def ensure_running(self): # type: () -> None """ Ensure the scheduler is running. By default, this method is a no-op. The method should be overridden by any implementation for which it is relevant.
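Concrete schedulers override this to lazily spawn their sampling thread on first use. A minimal usage sketch (assuming the ThreadScheduler defined later in this module)::

    scheduler = ThreadScheduler(frequency=DEFAULT_SAMPLING_FREQUENCY)
    scheduler.setup()
    scheduler.ensure_running()  # spawns the daemon sampler thread if needed
    scheduler.teardown()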
""" return None def start_profiling(self, profile): # type: (Profile) -> None self.ensure_running() self.new_profiles.append(profile) def make_sampler(self): # type: () -> Callable[..., None] cwd = os.getcwd() cache = LRUCache(max_size=256) def _sample_stack(*args, **kwargs): # type: (*Any, **Any) -> None """ Take a sample of the stack on all the threads in the process. This should be called at a regular interval to collect samples. """ # no profiles taking place, so we can stop early if not self.new_profiles and not self.active_profiles: # make sure to clear the cache if we're not profiling so we dont # keep a reference to the last stack of frames around return # This is the number of profiles we want to pop off. # It's possible another thread adds a new profile to # the list and we spend longer than we want inside # the loop below. # # Also make sure to set this value before extracting # frames so we do not write to any new profiles that # were started after this point. new_profiles = len(self.new_profiles) now = nanosecond_time() try: sample = [ (str(tid), extract_stack(frame, cache, cwd)) for tid, frame in sys._current_frames().items() ] except AttributeError: # For some reason, the frame we get doesn't have certain attributes. # When this happens, we abandon the current sample as it's bad. capture_internal_exception(sys.exc_info()) return # Move the new profiles into the active_profiles set. # # We cannot directly add the to active_profiles set # in `start_profiling` because it is called from other # threads which can cause a RuntimeError when it the # set sizes changes during iteration without a lock. # # We also want to avoid using a lock here so threads # that are starting profiles are not blocked until it # can acquire the lock. for _ in range(new_profiles): self.active_profiles.add(self.new_profiles.popleft()) inactive_profiles = [] for profile in self.active_profiles: if profile.active: profile.write(now, sample) else: # If a thread is marked inactive, we buffer it # to `inactive_profiles` so it can be removed. # We cannot remove it here as it would result # in a RuntimeError. inactive_profiles.append(profile) for profile in inactive_profiles: self.active_profiles.remove(profile) return _sample_stack class ThreadScheduler(Scheduler): """ This scheduler is based on running a daemon thread that will call the sampler at a regular interval. """ mode = "thread" # type: ProfilerMode name = "sentry.profiler.ThreadScheduler" def __init__(self, frequency): # type: (int) -> None super().__init__(frequency=frequency) # used to signal to the thread that it should stop self.running = False self.thread = None # type: Optional[threading.Thread] self.pid = None # type: Optional[int] self.lock = threading.Lock() def setup(self): # type: () -> None pass def teardown(self): # type: () -> None if self.running: self.running = False if self.thread is not None: self.thread.join() def ensure_running(self): # type: () -> None """ Check that the profiler has an active thread to run in, and start one if that's not the case. Note that this might fail (e.g. in Python 3.12 it's not possible to spawn new threads at interpreter shutdown). In that case self.running will be False after running this function. 
""" pid = os.getpid() # is running on the right process if self.running and self.pid == pid: return with self.lock: # another thread may have tried to acquire the lock # at the same time so it may start another thread # make sure to check again before proceeding if self.running and self.pid == pid: return self.pid = pid self.running = True # make sure the thread is a daemon here otherwise this # can keep the application running after other threads # have exited self.thread = threading.Thread(name=self.name, target=self.run, daemon=True) try: self.thread.start() except RuntimeError: # Unfortunately at this point the interpreter is in a state that no # longer allows us to spawn a thread and we have to bail. self.running = False self.thread = None return def run(self): # type: () -> None last = time.perf_counter() while self.running: self.sampler() # some time may have elapsed since the last time # we sampled, so we need to account for that and # not sleep for too long elapsed = time.perf_counter() - last if elapsed < self.interval: thread_sleep(self.interval - elapsed) # after sleeping, make sure to take the current # timestamp so we can use it next iteration last = time.perf_counter() class GeventScheduler(Scheduler): """ This scheduler is based on the thread scheduler but adapted to work with gevent. When using gevent, it may monkey patch the threading modules (`threading` and `_thread`). This results in the use of greenlets instead of native threads. This is an issue because the sampler CANNOT run in a greenlet because 1. Other greenlets doing sync work will prevent the sampler from running 2. The greenlet runs in the same thread as other greenlets so when taking a sample, other greenlets will have been evicted from the thread. This results in a sample containing only the sampler's code. """ mode = "gevent" # type: ProfilerMode name = "sentry.profiler.GeventScheduler" def __init__(self, frequency): # type: (int) -> None if ThreadPool is None: raise ValueError("Profiler mode: {} is not available".format(self.mode)) super().__init__(frequency=frequency) # used to signal to the thread that it should stop self.running = False self.thread = None # type: Optional[_ThreadPool] self.pid = None # type: Optional[int] # This intentionally uses the gevent patched threading.Lock. # The lock will be required when first trying to start profiles # as we need to spawn the profiler thread from the greenlets. self.lock = threading.Lock() def setup(self): # type: () -> None pass def teardown(self): # type: () -> None if self.running: self.running = False if self.thread is not None: self.thread.join() def ensure_running(self): # type: () -> None pid = os.getpid() # is running on the right process if self.running and self.pid == pid: return with self.lock: # another thread may have tried to acquire the lock # at the same time so it may start another thread # make sure to check again before proceeding if self.running and self.pid == pid: return self.pid = pid self.running = True self.thread = ThreadPool(1) # type: ignore[misc] try: self.thread.spawn(self.run) except RuntimeError: # Unfortunately at this point the interpreter is in a state that no # longer allows us to spawn a thread and we have to bail. 
self.running = False self.thread = None return def run(self): # type: () -> None last = time.perf_counter() while self.running: self.sampler() # some time may have elapsed since the last time # we sampled, so we need to account for that and # not sleep for too long elapsed = time.perf_counter() - last if elapsed < self.interval: thread_sleep(self.interval - elapsed) # after sleeping, make sure to take the current # timestamp so we can use it next iteration last = time.perf_counter() sentry-python-2.18.0/sentry_sdk/profiler/utils.py000066400000000000000000000146341471214654000222020ustar00rootroot00000000000000import os from collections import deque from sentry_sdk._compat import PY311 from sentry_sdk.utils import filename_for_module from typing import TYPE_CHECKING if TYPE_CHECKING: from sentry_sdk._lru_cache import LRUCache from types import FrameType from typing import Deque from typing import List from typing import Optional from typing import Sequence from typing import Tuple from typing_extensions import TypedDict ThreadId = str ProcessedStack = List[int] ProcessedFrame = TypedDict( "ProcessedFrame", { "abs_path": str, "filename": Optional[str], "function": str, "lineno": int, "module": Optional[str], }, ) ProcessedThreadMetadata = TypedDict( "ProcessedThreadMetadata", {"name": str}, ) FrameId = Tuple[ str, # abs_path int, # lineno str, # function ] FrameIds = Tuple[FrameId, ...] # The exact value of this id is not very meaningful. The purpose # of this id is to give us a compact and unique identifier for a # raw stack that can be used as a key to a dictionary so that it # can be used during the sampled format generation. StackId = Tuple[int, int] ExtractedStack = Tuple[StackId, FrameIds, List[ProcessedFrame]] ExtractedSample = Sequence[Tuple[ThreadId, ExtractedStack]] # The default sampling frequency to use. This is set at 101 in order to # mitigate the effects of lockstep sampling. DEFAULT_SAMPLING_FREQUENCY = 101 # We want to impose a stack depth limit so that samples aren't too large. MAX_STACK_DEPTH = 128 if PY311: def get_frame_name(frame): # type: (FrameType) -> str return frame.f_code.co_qualname else: def get_frame_name(frame): # type: (FrameType) -> str f_code = frame.f_code co_varnames = f_code.co_varnames # co_name only contains the frame name. If the frame was a method, # the class name will NOT be included. 
name = f_code.co_name # if it was a method, we can get the class name by inspecting # the f_locals for the `self` argument try: if ( # the co_varnames start with the frame's positional arguments # and we expect the first to be `self` if it's an instance method co_varnames and co_varnames[0] == "self" and "self" in frame.f_locals ): for cls in type(frame.f_locals["self"]).__mro__: if name in cls.__dict__: return "{}.{}".format(cls.__name__, name) except (AttributeError, ValueError): pass # if it was a class method (decorated with `@classmethod`), # we can get the class name by inspecting the f_locals for the `cls` argument try: if ( # the co_varnames start with the frame's positional arguments # and we expect the first to be `cls` if it's a class method co_varnames and co_varnames[0] == "cls" and "cls" in frame.f_locals ): for cls in frame.f_locals["cls"].__mro__: if name in cls.__dict__: return "{}.{}".format(cls.__name__, name) except (AttributeError, ValueError): pass # nothing we can do if it is a staticmethod (decorated with @staticmethod) # we've done all we can, time to give up and return what we have return name def frame_id(raw_frame): # type: (FrameType) -> FrameId return (raw_frame.f_code.co_filename, raw_frame.f_lineno, get_frame_name(raw_frame)) def extract_frame(fid, raw_frame, cwd): # type: (FrameId, FrameType, str) -> ProcessedFrame abs_path = raw_frame.f_code.co_filename try: module = raw_frame.f_globals["__name__"] except Exception: module = None # namedtuples can be many times slower when initializing # and accessing attributes, so we opt to use a plain dict here instead return { # This originally was `os.path.abspath(abs_path)` but that had # a large performance overhead. # # According to docs, this is equivalent to # `os.path.normpath(os.path.join(os.getcwd(), path))`. # The `os.getcwd()` call is slow here, so we precompute it. # # Additionally, since we are using normalized path already, # we skip calling `os.path.normpath` entirely. "abs_path": os.path.join(cwd, abs_path), "module": module, "filename": filename_for_module(module, abs_path) or None, "function": fid[2], "lineno": raw_frame.f_lineno, } def extract_stack( raw_frame, # type: Optional[FrameType] cache, # type: LRUCache cwd, # type: str max_stack_depth=MAX_STACK_DEPTH, # type: int ): # type: (...) -> ExtractedStack """ Extracts the stack starting at the specified frame. The extracted stack assumes the specified frame is the top of the stack, and works back to the bottom of the stack. In the event that the stack is more than `MAX_STACK_DEPTH` frames deep, only the first `MAX_STACK_DEPTH` frames will be returned. """ raw_frames = deque(maxlen=max_stack_depth) # type: Deque[FrameType] while raw_frame is not None: f_back = raw_frame.f_back raw_frames.append(raw_frame) raw_frame = f_back frame_ids = tuple(frame_id(raw_frame) for raw_frame in raw_frames) frames = [] for i, fid in enumerate(frame_ids): frame = cache.get(fid) if frame is None: frame = extract_frame(fid, raw_frames[i], cwd) cache.set(fid, frame) frames.append(frame) # Instead of mapping the stack into frame ids and hashing # that as a tuple, we can directly hash the stack. # This saves us from having to generate yet another list. # Additionally, using the stack as the key directly is # costly because the stack can be large, so we pre-hash # the stack, and use the hash as the key as this will be # needed a few times to improve performance. # # To reduce the likelihood of hash collisions, we include # the stack depth.
This means that only stacks of the same # depth can suffer from hash collisions. stack_id = len(raw_frames), hash(frame_ids) return stack_id, frame_ids, frames sentry-python-2.18.0/sentry_sdk/py.typed000066400000000000000000000000001471214654000203230ustar00rootroot00000000000000sentry-python-2.18.0/sentry_sdk/scope.py000066400000000000000000001645751471214654000203430ustar00rootroot00000000000000import os import sys import warnings from copy import copy from collections import deque from contextlib import contextmanager from enum import Enum from datetime import datetime, timezone from functools import wraps from itertools import chain from sentry_sdk.attachments import Attachment from sentry_sdk.consts import DEFAULT_MAX_BREADCRUMBS, FALSE_VALUES, INSTRUMENTER from sentry_sdk.flag_utils import FlagBuffer, DEFAULT_FLAG_CAPACITY from sentry_sdk.profiler.continuous_profiler import try_autostart_continuous_profiler from sentry_sdk.profiler.transaction_profiler import Profile from sentry_sdk.session import Session from sentry_sdk.tracing_utils import ( Baggage, has_tracing_enabled, normalize_incoming_data, PropagationContext, ) from sentry_sdk.tracing import ( BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME, NoOpSpan, Span, Transaction, ) from sentry_sdk.utils import ( capture_internal_exception, capture_internal_exceptions, ContextVar, datetime_from_isoformat, disable_capture_event, event_from_exception, exc_info_from_error, logger, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from collections.abc import Mapping, MutableMapping from typing import Any from typing import Callable from typing import Deque from typing import Dict from typing import Generator from typing import Iterator from typing import List from typing import Optional from typing import ParamSpec from typing import Tuple from typing import TypeVar from typing import Union from typing_extensions import Unpack from sentry_sdk._types import ( Breadcrumb, BreadcrumbHint, ErrorProcessor, Event, EventProcessor, ExcInfo, Hint, LogLevelStr, SamplingContext, Type, ) from sentry_sdk.tracing import TransactionKwargs import sentry_sdk P = ParamSpec("P") R = TypeVar("R") F = TypeVar("F", bound=Callable[..., Any]) T = TypeVar("T") # Holds data that will be added to **all** events sent by this process. # In case this is a http server (think web framework) with multiple users # the data will be added to events of all users. # Typically this is used for process wide data such as the release. _global_scope = None # type: Optional[Scope] # Holds data for the active request. # This is used to isolate data for different requests or users. # The isolation scope is usually created by integrations, but may also # be created manually _isolation_scope = ContextVar("isolation_scope", default=None) # Holds data for the active span. # This can be used to manually add additional data to a span. 
_current_scope = ContextVar("current_scope", default=None) global_event_processors = [] # type: List[EventProcessor] class ScopeType(Enum): CURRENT = "current" ISOLATION = "isolation" GLOBAL = "global" MERGED = "merged" class _ScopeManager: def __init__(self, hub=None): # type: (Optional[Any]) -> None self._old_scopes = [] # type: List[Scope] def __enter__(self): # type: () -> Scope isolation_scope = Scope.get_isolation_scope() self._old_scopes.append(isolation_scope) forked_scope = isolation_scope.fork() _isolation_scope.set(forked_scope) return forked_scope def __exit__(self, exc_type, exc_value, tb): # type: (Any, Any, Any) -> None old_scope = self._old_scopes.pop() _isolation_scope.set(old_scope) def add_global_event_processor(processor): # type: (EventProcessor) -> None global_event_processors.append(processor) def _attr_setter(fn): # type: (Any) -> Any return property(fset=fn, doc=fn.__doc__) def _disable_capture(fn): # type: (F) -> F @wraps(fn) def wrapper(self, *args, **kwargs): # type: (Any, *Dict[str, Any], **Any) -> Any if not self._should_capture: return try: self._should_capture = False return fn(self, *args, **kwargs) finally: self._should_capture = True return wrapper # type: ignore class Scope: """The scope holds extra information that should be sent with all events that belong to it. """ # NOTE: Even though it should not happen, the scope needs to not crash when # accessed by multiple threads. It's fine if it's full of races, but those # races should never make the user application crash. # # The same needs to hold for any accesses of the scope the SDK makes. __slots__ = ( "_level", "_name", "_fingerprint", # note that for legacy reasons, _transaction is the transaction *name*, # not a Transaction object (the object is stored in _span) "_transaction", "_transaction_info", "_user", "_tags", "_contexts", "_extras", "_breadcrumbs", "_event_processors", "_error_processors", "_should_capture", "_span", "_session", "_attachments", "_force_auto_session_tracking", "_profile", "_propagation_context", "client", "_type", "_last_event_id", "_flags", ) def __init__(self, ty=None, client=None): # type: (Optional[ScopeType], Optional[sentry_sdk.Client]) -> None self._type = ty self._event_processors = [] # type: List[EventProcessor] self._error_processors = [] # type: List[ErrorProcessor] self._name = None # type: Optional[str] self._propagation_context = None # type: Optional[PropagationContext] self.client = NonRecordingClient() # type: sentry_sdk.client.BaseClient if client is not None: self.set_client(client) self.clear() incoming_trace_information = self._load_trace_data_from_env() self.generate_propagation_context(incoming_data=incoming_trace_information) def __copy__(self): # type: () -> Scope """ Returns a copy of this scope. This also creates a copy of all referenced data structures. 
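A minimal usage sketch (the standard library's ``copy`` dispatches to this method)::

    import sentry_sdk
    from copy import copy

    snapshot = copy(sentry_sdk.get_current_scope())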
""" rv = object.__new__(self.__class__) # type: Scope rv._type = self._type rv._level = self._level rv._name = self._name rv._fingerprint = self._fingerprint rv._transaction = self._transaction rv._transaction_info = dict(self._transaction_info) rv._user = self._user rv._tags = dict(self._tags) rv._contexts = dict(self._contexts) rv._extras = dict(self._extras) rv._breadcrumbs = copy(self._breadcrumbs) rv._event_processors = list(self._event_processors) rv._error_processors = list(self._error_processors) rv._propagation_context = self._propagation_context rv._should_capture = self._should_capture rv._span = self._span rv._session = self._session rv._force_auto_session_tracking = self._force_auto_session_tracking rv._attachments = list(self._attachments) rv._profile = self._profile rv._last_event_id = self._last_event_id rv._flags = copy(self._flags) return rv @classmethod def get_current_scope(cls): # type: () -> Scope """ .. versionadded:: 2.0.0 Returns the current scope. """ current_scope = _current_scope.get() if current_scope is None: current_scope = Scope(ty=ScopeType.CURRENT) _current_scope.set(current_scope) return current_scope @classmethod def set_current_scope(cls, new_current_scope): # type: (Scope) -> None """ .. versionadded:: 2.0.0 Sets the given scope as the new current scope overwriting the existing current scope. :param new_current_scope: The scope to set as the new current scope. """ _current_scope.set(new_current_scope) @classmethod def get_isolation_scope(cls): # type: () -> Scope """ .. versionadded:: 2.0.0 Returns the isolation scope. """ isolation_scope = _isolation_scope.get() if isolation_scope is None: isolation_scope = Scope(ty=ScopeType.ISOLATION) _isolation_scope.set(isolation_scope) return isolation_scope @classmethod def set_isolation_scope(cls, new_isolation_scope): # type: (Scope) -> None """ .. versionadded:: 2.0.0 Sets the given scope as the new isolation scope overwriting the existing isolation scope. :param new_isolation_scope: The scope to set as the new isolation scope. """ _isolation_scope.set(new_isolation_scope) @classmethod def get_global_scope(cls): # type: () -> Scope """ .. versionadded:: 2.0.0 Returns the global scope. """ global _global_scope if _global_scope is None: _global_scope = Scope(ty=ScopeType.GLOBAL) return _global_scope @classmethod def last_event_id(cls): # type: () -> Optional[str] """ .. versionadded:: 2.2.0 Returns event ID of the event most recently captured by the isolation scope, or None if no event has been captured. We do not consider events that are dropped, e.g. by a before_send hook. Transactions also are not considered events in this context. The event corresponding to the returned event ID is NOT guaranteed to actually be sent to Sentry; whether the event is sent depends on the transport. The event could be sent later or not at all. Even a sent event could fail to arrive in Sentry due to network issues, exhausted quotas, or various other reasons. """ return cls.get_isolation_scope()._last_event_id def _merge_scopes(self, additional_scope=None, additional_scope_kwargs=None): # type: (Optional[Scope], Optional[Dict[str, Any]]) -> Scope """ Merges global, isolation and current scope into a new scope and adds the given additional scope or additional scope kwargs to it. 
""" if additional_scope and additional_scope_kwargs: raise TypeError("cannot provide scope and kwargs") final_scope = copy(_global_scope) if _global_scope is not None else Scope() final_scope._type = ScopeType.MERGED isolation_scope = _isolation_scope.get() if isolation_scope is not None: final_scope.update_from_scope(isolation_scope) current_scope = _current_scope.get() if current_scope is not None: final_scope.update_from_scope(current_scope) if self != current_scope and self != isolation_scope: final_scope.update_from_scope(self) if additional_scope is not None: if callable(additional_scope): additional_scope(final_scope) else: final_scope.update_from_scope(additional_scope) elif additional_scope_kwargs: final_scope.update_from_kwargs(**additional_scope_kwargs) return final_scope @classmethod def get_client(cls): # type: () -> sentry_sdk.client.BaseClient """ .. versionadded:: 2.0.0 Returns the currently used :py:class:`sentry_sdk.Client`. This checks the current scope, the isolation scope and the global scope for a client. If no client is available a :py:class:`sentry_sdk.client.NonRecordingClient` is returned. """ current_scope = _current_scope.get() try: client = current_scope.client except AttributeError: client = None if client is not None and client.is_active(): return client isolation_scope = _isolation_scope.get() try: client = isolation_scope.client except AttributeError: client = None if client is not None and client.is_active(): return client try: client = _global_scope.client # type: ignore except AttributeError: client = None if client is not None and client.is_active(): return client return NonRecordingClient() def set_client(self, client=None): # type: (Optional[sentry_sdk.client.BaseClient]) -> None """ .. versionadded:: 2.0.0 Sets the client for this scope. :param client: The client to use in this scope. If `None` the client of the scope will be replaced by a :py:class:`sentry_sdk.NonRecordingClient`. """ self.client = client if client is not None else NonRecordingClient() def fork(self): # type: () -> Scope """ .. versionadded:: 2.0.0 Returns a fork of this scope. """ forked_scope = copy(self) return forked_scope def _load_trace_data_from_env(self): # type: () -> Optional[Dict[str, str]] """ Load Sentry trace id and baggage from environment variables. Can be disabled by setting SENTRY_USE_ENVIRONMENT to "false". """ incoming_trace_information = None sentry_use_environment = ( os.environ.get("SENTRY_USE_ENVIRONMENT") or "" ).lower() use_environment = sentry_use_environment not in FALSE_VALUES if use_environment: incoming_trace_information = {} if os.environ.get("SENTRY_TRACE"): incoming_trace_information[SENTRY_TRACE_HEADER_NAME] = ( os.environ.get("SENTRY_TRACE") or "" ) if os.environ.get("SENTRY_BAGGAGE"): incoming_trace_information[BAGGAGE_HEADER_NAME] = ( os.environ.get("SENTRY_BAGGAGE") or "" ) return incoming_trace_information or None def set_new_propagation_context(self): # type: () -> None """ Creates a new propagation context and sets it as `_propagation_context`. Overwriting existing one. """ self._propagation_context = PropagationContext() def generate_propagation_context(self, incoming_data=None): # type: (Optional[Dict[str, str]]) -> None """ Makes sure the propagation context is set on the scope. If there is `incoming_data` overwrite existing propagation context. If there is no `incoming_data` create new propagation context, but do NOT overwrite if already existing. 
""" if incoming_data: propagation_context = PropagationContext.from_incoming_data(incoming_data) if propagation_context is not None: self._propagation_context = propagation_context if self._type != ScopeType.CURRENT: if self._propagation_context is None: self.set_new_propagation_context() def get_dynamic_sampling_context(self): # type: () -> Optional[Dict[str, str]] """ Returns the Dynamic Sampling Context from the Propagation Context. If not existing, creates a new one. """ if self._propagation_context is None: return None baggage = self.get_baggage() if baggage is not None: self._propagation_context.dynamic_sampling_context = ( baggage.dynamic_sampling_context() ) return self._propagation_context.dynamic_sampling_context def get_traceparent(self, *args, **kwargs): # type: (Any, Any) -> Optional[str] """ Returns the Sentry "sentry-trace" header (aka the traceparent) from the currently active span or the scopes Propagation Context. """ client = self.get_client() # If we have an active span, return traceparent from there if has_tracing_enabled(client.options) and self.span is not None: return self.span.to_traceparent() # If this scope has a propagation context, return traceparent from there if self._propagation_context is not None: traceparent = "%s-%s" % ( self._propagation_context.trace_id, self._propagation_context.span_id, ) return traceparent # Fall back to isolation scope's traceparent. It always has one return self.get_isolation_scope().get_traceparent() def get_baggage(self, *args, **kwargs): # type: (Any, Any) -> Optional[Baggage] """ Returns the Sentry "baggage" header containing trace information from the currently active span or the scopes Propagation Context. """ client = self.get_client() # If we have an active span, return baggage from there if has_tracing_enabled(client.options) and self.span is not None: return self.span.to_baggage() # If this scope has a propagation context, return baggage from there if self._propagation_context is not None: dynamic_sampling_context = ( self._propagation_context.dynamic_sampling_context ) if dynamic_sampling_context is None: return Baggage.from_options(self) else: return Baggage(dynamic_sampling_context) # Fall back to isolation scope's baggage. It always has one return self.get_isolation_scope().get_baggage() def get_trace_context(self): # type: () -> Any """ Returns the Sentry "trace" context from the Propagation Context. """ if self._propagation_context is None: return None trace_context = { "trace_id": self._propagation_context.trace_id, "span_id": self._propagation_context.span_id, "parent_span_id": self._propagation_context.parent_span_id, "dynamic_sampling_context": self.get_dynamic_sampling_context(), } # type: Dict[str, Any] return trace_context def trace_propagation_meta(self, *args, **kwargs): # type: (*Any, **Any) -> str """ Return meta tags which should be injected into HTML templates to allow propagation of trace information. """ span = kwargs.pop("span", None) if span is not None: logger.warning( "The parameter `span` in trace_propagation_meta() is deprecated and will be removed in the future." ) meta = "" sentry_trace = self.get_traceparent() if sentry_trace is not None: meta += '' % ( SENTRY_TRACE_HEADER_NAME, sentry_trace, ) baggage = self.get_baggage() if baggage is not None: meta += '' % ( BAGGAGE_HEADER_NAME, baggage.serialize(), ) return meta def iter_headers(self): # type: () -> Iterator[Tuple[str, str]] """ Creates a generator which returns the `sentry-trace` and `baggage` headers from the Propagation Context. 
""" if self._propagation_context is not None: traceparent = self.get_traceparent() if traceparent is not None: yield SENTRY_TRACE_HEADER_NAME, traceparent dsc = self.get_dynamic_sampling_context() if dsc is not None: baggage = Baggage(dsc).serialize() yield BAGGAGE_HEADER_NAME, baggage def iter_trace_propagation_headers(self, *args, **kwargs): # type: (Any, Any) -> Generator[Tuple[str, str], None, None] """ Return HTTP headers which allow propagation of trace data. If a span is given, the trace data will taken from the span. If no span is given, the trace data is taken from the scope. """ client = self.get_client() if not client.options.get("propagate_traces"): return span = kwargs.pop("span", None) span = span or self.span if has_tracing_enabled(client.options) and span is not None: for header in span.iter_headers(): yield header else: # If this scope has a propagation context, return headers from there # (it could be that self is not the current scope nor the isolation scope) if self._propagation_context is not None: for header in self.iter_headers(): yield header else: # otherwise try headers from current scope current_scope = self.get_current_scope() if current_scope._propagation_context is not None: for header in current_scope.iter_headers(): yield header else: # otherwise fall back to headers from isolation scope isolation_scope = self.get_isolation_scope() if isolation_scope._propagation_context is not None: for header in isolation_scope.iter_headers(): yield header def get_active_propagation_context(self): # type: () -> Optional[PropagationContext] if self._propagation_context is not None: return self._propagation_context current_scope = self.get_current_scope() if current_scope._propagation_context is not None: return current_scope._propagation_context isolation_scope = self.get_isolation_scope() if isolation_scope._propagation_context is not None: return isolation_scope._propagation_context return None def clear(self): # type: () -> None """Clears the entire scope.""" self._level = None # type: Optional[LogLevelStr] self._fingerprint = None # type: Optional[List[str]] self._transaction = None # type: Optional[str] self._transaction_info = {} # type: MutableMapping[str, str] self._user = None # type: Optional[Dict[str, Any]] self._tags = {} # type: Dict[str, Any] self._contexts = {} # type: Dict[str, Dict[str, Any]] self._extras = {} # type: MutableMapping[str, Any] self._attachments = [] # type: List[Attachment] self.clear_breadcrumbs() self._should_capture = True # type: bool self._span = None # type: Optional[Span] self._session = None # type: Optional[Session] self._force_auto_session_tracking = None # type: Optional[bool] self._profile = None # type: Optional[Profile] self._propagation_context = None # self._last_event_id is only applicable to isolation scopes self._last_event_id = None # type: Optional[str] self._flags = None # type: Optional[FlagBuffer] @_attr_setter def level(self, value): # type: (LogLevelStr) -> None """ When set this overrides the level. .. deprecated:: 1.0.0 Use :func:`set_level` instead. :param value: The level to set. """ logger.warning( "Deprecated: use .set_level() instead. This will be removed in the future." ) self._level = value def set_level(self, value): # type: (LogLevelStr) -> None """ Sets the level for the scope. :param value: The level to set. 
""" self._level = value @_attr_setter def fingerprint(self, value): # type: (Optional[List[str]]) -> None """When set this overrides the default fingerprint.""" self._fingerprint = value @property def transaction(self): # type: () -> Any # would be type: () -> Optional[Transaction], see https://github.com/python/mypy/issues/3004 """Return the transaction (root span) in the scope, if any.""" # there is no span/transaction on the scope if self._span is None: return None # there is an orphan span on the scope if self._span.containing_transaction is None: return None # there is either a transaction (which is its own containing # transaction) or a non-orphan span on the scope return self._span.containing_transaction @transaction.setter def transaction(self, value): # type: (Any) -> None # would be type: (Optional[str]) -> None, see https://github.com/python/mypy/issues/3004 """When set this forces a specific transaction name to be set. Deprecated: use set_transaction_name instead.""" # XXX: the docstring above is misleading. The implementation of # apply_to_event prefers an existing value of event.transaction over # anything set in the scope. # XXX: note that with the introduction of the Scope.transaction getter, # there is a semantic and type mismatch between getter and setter. The # getter returns a Transaction, the setter sets a transaction name. # Without breaking version compatibility, we could make the setter set a # transaction name or transaction (self._span) depending on the type of # the value argument. logger.warning( "Assigning to scope.transaction directly is deprecated: use scope.set_transaction_name() instead." ) self._transaction = value if self._span and self._span.containing_transaction: self._span.containing_transaction.name = value def set_transaction_name(self, name, source=None): # type: (str, Optional[str]) -> None """Set the transaction name and optionally the transaction source.""" self._transaction = name if self._span and self._span.containing_transaction: self._span.containing_transaction.name = name if source: self._span.containing_transaction.source = source if source: self._transaction_info["source"] = source @_attr_setter def user(self, value): # type: (Optional[Dict[str, Any]]) -> None """When set a specific user is bound to the scope. Deprecated in favor of set_user.""" self.set_user(value) def set_user(self, value): # type: (Optional[Dict[str, Any]]) -> None """Sets a user for the scope.""" self._user = value session = self.get_isolation_scope()._session if session is not None: session.update(user=value) @property def span(self): # type: () -> Optional[Span] """Get/set current tracing span or transaction.""" return self._span @span.setter def span(self, span): # type: (Optional[Span]) -> None self._span = span # XXX: this differs from the implementation in JS, there Scope.setSpan # does not set Scope._transactionName. if isinstance(span, Transaction): transaction = span if transaction.name: self._transaction = transaction.name if transaction.source: self._transaction_info["source"] = transaction.source @property def profile(self): # type: () -> Optional[Profile] return self._profile @profile.setter def profile(self, profile): # type: (Optional[Profile]) -> None self._profile = profile def set_tag(self, key, value): # type: (str, Any) -> None """ Sets a tag for a key to a specific value. :param key: Key of the tag to set. :param value: Value of the tag to set. 
""" self._tags[key] = value def set_tags(self, tags): # type: (Mapping[str, object]) -> None """Sets multiple tags at once. This method updates multiple tags at once. The tags are passed as a dictionary or other mapping type. Calling this method is equivalent to calling `set_tag` on each key-value pair in the mapping. If a tag key already exists in the scope, its value will be updated. If the tag key does not exist in the scope, the key-value pair will be added to the scope. This method only modifies tag keys in the `tags` mapping passed to the method. `scope.set_tags({})` is, therefore, a no-op. :param tags: A mapping of tag keys to tag values to set. """ self._tags.update(tags) def remove_tag(self, key): # type: (str) -> None """ Removes a specific tag. :param key: Key of the tag to remove. """ self._tags.pop(key, None) def set_context( self, key, # type: str value, # type: Dict[str, Any] ): # type: (...) -> None """ Binds a context at a certain key to a specific value. """ self._contexts[key] = value def remove_context( self, key # type: str ): # type: (...) -> None """Removes a context.""" self._contexts.pop(key, None) def set_extra( self, key, # type: str value, # type: Any ): # type: (...) -> None """Sets an extra key to a specific value.""" self._extras[key] = value def remove_extra( self, key # type: str ): # type: (...) -> None """Removes a specific extra key.""" self._extras.pop(key, None) def clear_breadcrumbs(self): # type: () -> None """Clears breadcrumb buffer.""" self._breadcrumbs = deque() # type: Deque[Breadcrumb] def add_attachment( self, bytes=None, # type: Union[None, bytes, Callable[[], bytes]] filename=None, # type: Optional[str] path=None, # type: Optional[str] content_type=None, # type: Optional[str] add_to_transactions=False, # type: bool ): # type: (...) -> None """Adds an attachment to future events sent from this scope. The parameters are the same as for the :py:class:`sentry_sdk.attachments.Attachment` constructor. """ self._attachments.append( Attachment( bytes=bytes, path=path, filename=filename, content_type=content_type, add_to_transactions=add_to_transactions, ) ) def add_breadcrumb(self, crumb=None, hint=None, **kwargs): # type: (Optional[Breadcrumb], Optional[BreadcrumbHint], Any) -> None """ Adds a breadcrumb. :param crumb: Dictionary with the data as the sentry v7/v8 protocol expects. :param hint: An optional value that can be used by `before_breadcrumb` to customize the breadcrumbs that are emitted. 
""" client = self.get_client() if not client.is_active(): logger.info("Dropped breadcrumb because no client bound") return before_breadcrumb = client.options.get("before_breadcrumb") max_breadcrumbs = client.options.get("max_breadcrumbs", DEFAULT_MAX_BREADCRUMBS) crumb = dict(crumb or ()) # type: Breadcrumb crumb.update(kwargs) if not crumb: return hint = dict(hint or ()) # type: Hint if crumb.get("timestamp") is None: crumb["timestamp"] = datetime.now(timezone.utc) if crumb.get("type") is None: crumb["type"] = "default" if before_breadcrumb is not None: new_crumb = before_breadcrumb(crumb, hint) else: new_crumb = crumb if new_crumb is not None: self._breadcrumbs.append(new_crumb) else: logger.info("before breadcrumb dropped breadcrumb (%s)", crumb) while len(self._breadcrumbs) > max_breadcrumbs: self._breadcrumbs.popleft() def start_transaction( self, transaction=None, instrumenter=INSTRUMENTER.SENTRY, custom_sampling_context=None, **kwargs, ): # type: (Optional[Transaction], str, Optional[SamplingContext], Unpack[TransactionKwargs]) -> Union[Transaction, NoOpSpan] """ Start and return a transaction. Start an existing transaction if given, otherwise create and start a new transaction with kwargs. This is the entry point to manual tracing instrumentation. A tree structure can be built by adding child spans to the transaction, and child spans to other spans. To start a new child span within the transaction or any span, call the respective `.start_child()` method. Every child span must be finished before the transaction is finished, otherwise the unfinished spans are discarded. When used as context managers, spans and transactions are automatically finished at the end of the `with` block. If not using context managers, call the `.finish()` method. When the transaction is finished, it will be sent to Sentry with all its finished child spans. :param transaction: The transaction to start. If omitted, we create and start a new transaction. :param instrumenter: This parameter is meant for internal use only. It will be removed in the next major version. :param custom_sampling_context: The transaction's custom sampling context. :param kwargs: Optional keyword arguments to be passed to the Transaction constructor. See :py:class:`sentry_sdk.tracing.Transaction` for available arguments. """ kwargs.setdefault("scope", self) client = self.get_client() configuration_instrumenter = client.options["instrumenter"] if instrumenter != configuration_instrumenter: return NoOpSpan() try_autostart_continuous_profiler() custom_sampling_context = custom_sampling_context or {} # kwargs at this point has type TransactionKwargs, since we have removed # the client and custom_sampling_context from it. 
transaction_kwargs = kwargs # type: TransactionKwargs # if we haven't been given a transaction, make one if transaction is None: transaction = Transaction(**transaction_kwargs) # use traces_sample_rate, traces_sampler, and/or inheritance to make a # sampling decision sampling_context = { "transaction_context": transaction.to_json(), "parent_sampled": transaction.parent_sampled, } sampling_context.update(custom_sampling_context) transaction._set_initial_sampling_decision(sampling_context=sampling_context) if transaction.sampled: profile = Profile( transaction.sampled, transaction._start_timestamp_monotonic_ns ) profile._set_initial_sampling_decision(sampling_context=sampling_context) transaction._profile = profile # we don't bother to keep spans if we already know we're not going to # send the transaction max_spans = (client.options["_experiments"].get("max_spans")) or 1000 transaction.init_span_recorder(maxlen=max_spans) return transaction def start_span(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): # type: (str, Any) -> Span """ Start a span whose parent is the currently active span or transaction, if any. The return value is a :py:class:`sentry_sdk.tracing.Span` instance, typically used as a context manager to start and stop timing in a `with` block. Only spans contained in a transaction are sent to Sentry. Most integrations start a transaction at the appropriate time, for example for every incoming HTTP request. Use :py:meth:`sentry_sdk.start_transaction` to start a new transaction when one is not already in progress. For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Span`. The instrumenter parameter is deprecated for user code, and it will be removed in the next major version. Going forward, it should only be used by the SDK itself. """ if kwargs.get("description") is not None: warnings.warn( "The `description` parameter is deprecated. Please use `name` instead.", DeprecationWarning, stacklevel=2, ) with new_scope(): kwargs.setdefault("scope", self) client = self.get_client() configuration_instrumenter = client.options["instrumenter"] if instrumenter != configuration_instrumenter: return NoOpSpan() # get current span or transaction span = self.span or self.get_isolation_scope().span if span is None: # New spans get the `trace_id` from the scope if "trace_id" not in kwargs: propagation_context = self.get_active_propagation_context() if propagation_context is not None: kwargs["trace_id"] = propagation_context.trace_id span = Span(**kwargs) else: # Children take `trace_id` from the parent span. span = span.start_child(**kwargs) return span def continue_trace( self, environ_or_headers, op=None, name=None, source=None, origin="manual" ): # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str], str) -> Transaction """ Sets the propagation context from environment or headers and returns a transaction. """ self.generate_propagation_context(environ_or_headers) transaction = Transaction.continue_from_headers( normalize_incoming_data(environ_or_headers), op=op, origin=origin, name=name, source=source, ) return transaction def capture_event(self, event, hint=None, scope=None, **scope_kwargs): # type: (Event, Optional[Hint], Optional[Scope], Any) -> Optional[str] """ Captures an event. Merges given scope data and calls :py:meth:`sentry_sdk.client._Client.capture_event`. :param event: A ready-made event that can be directly sent to Sentry.
:param hint: Contains metadata about the event that can be read from `before_send`, such as the original exception object or a HTTP request object. :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events. The `scope` and `scope_kwargs` parameters are mutually exclusive. :param scope_kwargs: Optional data to apply to event. For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`. The `scope` and `scope_kwargs` parameters are mutually exclusive. :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`). """ if disable_capture_event.get(False): return None scope = self._merge_scopes(scope, scope_kwargs) event_id = self.get_client().capture_event(event=event, hint=hint, scope=scope) if event_id is not None and event.get("type") != "transaction": self.get_isolation_scope()._last_event_id = event_id return event_id def capture_message(self, message, level=None, scope=None, **scope_kwargs): # type: (str, Optional[LogLevelStr], Optional[Scope], Any) -> Optional[str] """ Captures a message. :param message: The string to send as the message. :param level: If no level is provided, the default level is `info`. :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events. The `scope` and `scope_kwargs` parameters are mutually exclusive. :param scope_kwargs: Optional data to apply to event. For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`. The `scope` and `scope_kwargs` parameters are mutually exclusive. :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`). """ if disable_capture_event.get(False): return None if level is None: level = "info" event = { "message": message, "level": level, } # type: Event return self.capture_event(event, scope=scope, **scope_kwargs) def capture_exception(self, error=None, scope=None, **scope_kwargs): # type: (Optional[Union[BaseException, ExcInfo]], Optional[Scope], Any) -> Optional[str] """Captures an exception. :param error: An exception to capture. If `None`, `sys.exc_info()` will be used. :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events. The `scope` and `scope_kwargs` parameters are mutually exclusive. :param scope_kwargs: Optional data to apply to event. For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`. The `scope` and `scope_kwargs` parameters are mutually exclusive. :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`). 
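Example Usage (a minimal sketch):

.. code-block:: python

    import sentry_sdk

    try:
        1 / 0
    except ZeroDivisionError:
        # With no ``error`` argument, the exception currently being
        # handled (``sys.exc_info()``) is captured.
        sentry_sdk.get_current_scope().capture_exception()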
""" if disable_capture_event.get(False): return None if error is not None: exc_info = exc_info_from_error(error) else: exc_info = sys.exc_info() event, hint = event_from_exception( exc_info, client_options=self.get_client().options ) try: return self.capture_event(event, hint=hint, scope=scope, **scope_kwargs) except Exception: capture_internal_exception(sys.exc_info()) return None def start_session(self, *args, **kwargs): # type: (*Any, **Any) -> None """Starts a new session.""" session_mode = kwargs.pop("session_mode", "application") self.end_session() client = self.get_client() self._session = Session( release=client.options.get("release"), environment=client.options.get("environment"), user=self._user, session_mode=session_mode, ) def end_session(self, *args, **kwargs): # type: (*Any, **Any) -> None """Ends the current session if there is one.""" session = self._session self._session = None if session is not None: session.close() self.get_client().capture_session(session) def stop_auto_session_tracking(self, *args, **kwargs): # type: (*Any, **Any) -> None """Stops automatic session tracking. This temporarily session tracking for the current scope when called. To resume session tracking call `resume_auto_session_tracking`. """ self.end_session() self._force_auto_session_tracking = False def resume_auto_session_tracking(self): # type: (...) -> None """Resumes automatic session tracking for the current scope if disabled earlier. This requires that generally automatic session tracking is enabled. """ self._force_auto_session_tracking = None def add_event_processor( self, func # type: EventProcessor ): # type: (...) -> None """Register a scope local event processor on the scope. :param func: This function behaves like `before_send.` """ if len(self._event_processors) > 20: logger.warning( "Too many event processors on scope! Clearing list to free up some memory: %r", self._event_processors, ) del self._event_processors[:] self._event_processors.append(func) def add_error_processor( self, func, # type: ErrorProcessor cls=None, # type: Optional[Type[BaseException]] ): # type: (...) -> None """Register a scope local error processor on the scope. :param func: A callback that works similar to an event processor but is invoked with the original exception info triple as second argument. :param cls: Optionally, only process exceptions of this type. """ if cls is not None: cls_ = cls # For mypy. 
real_func = func def func(event, exc_info): # type: (Event, ExcInfo) -> Optional[Event] try: is_inst = isinstance(exc_info[1], cls_) except Exception: is_inst = False if is_inst: return real_func(event, exc_info) return event self._error_processors.append(func) def _apply_level_to_event(self, event, hint, options): # type: (Event, Hint, Optional[Dict[str, Any]]) -> None if self._level is not None: event["level"] = self._level def _apply_breadcrumbs_to_event(self, event, hint, options): # type: (Event, Hint, Optional[Dict[str, Any]]) -> None event.setdefault("breadcrumbs", {}).setdefault("values", []).extend( self._breadcrumbs ) # Attempt to sort timestamps try: for crumb in event["breadcrumbs"]["values"]: if isinstance(crumb["timestamp"], str): crumb["timestamp"] = datetime_from_isoformat(crumb["timestamp"]) event["breadcrumbs"]["values"].sort(key=lambda crumb: crumb["timestamp"]) except Exception as err: logger.debug("Error when sorting breadcrumbs", exc_info=err) pass def _apply_user_to_event(self, event, hint, options): # type: (Event, Hint, Optional[Dict[str, Any]]) -> None if event.get("user") is None and self._user is not None: event["user"] = self._user def _apply_transaction_name_to_event(self, event, hint, options): # type: (Event, Hint, Optional[Dict[str, Any]]) -> None if event.get("transaction") is None and self._transaction is not None: event["transaction"] = self._transaction def _apply_transaction_info_to_event(self, event, hint, options): # type: (Event, Hint, Optional[Dict[str, Any]]) -> None if event.get("transaction_info") is None and self._transaction_info is not None: event["transaction_info"] = self._transaction_info def _apply_fingerprint_to_event(self, event, hint, options): # type: (Event, Hint, Optional[Dict[str, Any]]) -> None if event.get("fingerprint") is None and self._fingerprint is not None: event["fingerprint"] = self._fingerprint def _apply_extra_to_event(self, event, hint, options): # type: (Event, Hint, Optional[Dict[str, Any]]) -> None if self._extras: event.setdefault("extra", {}).update(self._extras) def _apply_tags_to_event(self, event, hint, options): # type: (Event, Hint, Optional[Dict[str, Any]]) -> None if self._tags: event.setdefault("tags", {}).update(self._tags) def _apply_contexts_to_event(self, event, hint, options): # type: (Event, Hint, Optional[Dict[str, Any]]) -> None if self._contexts: event.setdefault("contexts", {}).update(self._contexts) contexts = event.setdefault("contexts", {}) # Add "trace" context if contexts.get("trace") is None: if has_tracing_enabled(options) and self._span is not None: contexts["trace"] = self._span.get_trace_context() else: contexts["trace"] = self.get_trace_context() def _drop(self, cause, ty): # type: (Any, str) -> Optional[Any] logger.info("%s (%s) dropped event", ty, cause) return None def run_error_processors(self, event, hint): # type: (Event, Hint) -> Optional[Event] """ Runs the error processors on the event and returns the modified event. 
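Error processors registered on the global, isolation, and current scopes are
all run, in that order. A sketch of registering one (``drop_value_errors`` is
a hypothetical callback; its signature follows ``add_error_processor``):

.. code-block:: python

    def drop_value_errors(event, exc_info):
        if isinstance(exc_info[1], ValueError):
            return None  # returning None drops the event
        return event

    sentry_sdk.get_isolation_scope().add_error_processor(drop_value_errors)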
""" exc_info = hint.get("exc_info") if exc_info is not None: error_processors = chain( self.get_global_scope()._error_processors, self.get_isolation_scope()._error_processors, self.get_current_scope()._error_processors, ) for error_processor in error_processors: new_event = error_processor(event, exc_info) if new_event is None: return self._drop(error_processor, "error processor") event = new_event return event def run_event_processors(self, event, hint): # type: (Event, Hint) -> Optional[Event] """ Runs the event processors on the event and returns the modified event. """ ty = event.get("type") is_check_in = ty == "check_in" if not is_check_in: # Get scopes without creating them to prevent infinite recursion isolation_scope = _isolation_scope.get() current_scope = _current_scope.get() event_processors = chain( global_event_processors, _global_scope and _global_scope._event_processors or [], isolation_scope and isolation_scope._event_processors or [], current_scope and current_scope._event_processors or [], ) for event_processor in event_processors: new_event = event with capture_internal_exceptions(): new_event = event_processor(event, hint) if new_event is None: return self._drop(event_processor, "event processor") event = new_event return event @_disable_capture def apply_to_event( self, event, # type: Event hint, # type: Hint options=None, # type: Optional[Dict[str, Any]] ): # type: (...) -> Optional[Event] """Applies the information contained on the scope to the given event.""" ty = event.get("type") is_transaction = ty == "transaction" is_check_in = ty == "check_in" # put all attachments into the hint. This lets callbacks play around # with attachments. We also later pull this out of the hint when we # create the envelope. attachments_to_send = hint.get("attachments") or [] for attachment in self._attachments: if not is_transaction or attachment.add_to_transactions: attachments_to_send.append(attachment) hint["attachments"] = attachments_to_send self._apply_contexts_to_event(event, hint, options) if is_check_in: # Check-ins only support the trace context, strip all others event["contexts"] = { "trace": event.setdefault("contexts", {}).get("trace", {}) } if not is_check_in: self._apply_level_to_event(event, hint, options) self._apply_fingerprint_to_event(event, hint, options) self._apply_user_to_event(event, hint, options) self._apply_transaction_name_to_event(event, hint, options) self._apply_transaction_info_to_event(event, hint, options) self._apply_tags_to_event(event, hint, options) self._apply_extra_to_event(event, hint, options) if not is_transaction and not is_check_in: self._apply_breadcrumbs_to_event(event, hint, options) event = self.run_error_processors(event, hint) if event is None: return None event = self.run_event_processors(event, hint) if event is None: return None return event def update_from_scope(self, scope): # type: (Scope) -> None """Update the scope with another scope's data.""" if scope._level is not None: self._level = scope._level if scope._fingerprint is not None: self._fingerprint = scope._fingerprint if scope._transaction is not None: self._transaction = scope._transaction if scope._transaction_info is not None: self._transaction_info.update(scope._transaction_info) if scope._user is not None: self._user = scope._user if scope._tags: self._tags.update(scope._tags) if scope._contexts: self._contexts.update(scope._contexts) if scope._extras: self._extras.update(scope._extras) if scope._breadcrumbs: self._breadcrumbs.extend(scope._breadcrumbs) if 
scope._span: self._span = scope._span if scope._attachments: self._attachments.extend(scope._attachments) if scope._profile: self._profile = scope._profile if scope._propagation_context: self._propagation_context = scope._propagation_context if scope._session: self._session = scope._session def update_from_kwargs( self, user=None, # type: Optional[Any] level=None, # type: Optional[LogLevelStr] extras=None, # type: Optional[Dict[str, Any]] contexts=None, # type: Optional[Dict[str, Any]] tags=None, # type: Optional[Dict[str, str]] fingerprint=None, # type: Optional[List[str]] ): # type: (...) -> None """Update the scope's attributes.""" if level is not None: self._level = level if user is not None: self._user = user if extras is not None: self._extras.update(extras) if contexts is not None: self._contexts.update(contexts) if tags is not None: self._tags.update(tags) if fingerprint is not None: self._fingerprint = fingerprint def __repr__(self): # type: () -> str return "<%s id=%s name=%s type=%s>" % ( self.__class__.__name__, hex(id(self)), self._name, self._type, ) @property def flags(self): # type: () -> FlagBuffer if self._flags is None: max_flags = ( self.get_client().options["_experiments"].get("max_flags") or DEFAULT_FLAG_CAPACITY ) self._flags = FlagBuffer(capacity=max_flags) return self._flags @contextmanager def new_scope(): # type: () -> Generator[Scope, None, None] """ .. versionadded:: 2.0.0 Context manager that forks the current scope and runs the wrapped code in it. After the wrapped code is executed, the original scope is restored. Example Usage: .. code-block:: python import sentry_sdk with sentry_sdk.new_scope() as scope: scope.set_tag("color", "green") sentry_sdk.capture_message("hello") # will include `color` tag. sentry_sdk.capture_message("hello, again") # will NOT include `color` tag. """ # fork current scope current_scope = Scope.get_current_scope() new_scope = current_scope.fork() token = _current_scope.set(new_scope) try: yield new_scope finally: # restore original scope _current_scope.reset(token) @contextmanager def use_scope(scope): # type: (Scope) -> Generator[Scope, None, None] """ .. versionadded:: 2.0.0 Context manager that uses the given `scope` and runs the wrapped code in it. After the wrapped code is executed, the original scope is restored. Example Usage: Suppose the variable `scope` contains a `Scope` object, which is not currently the active scope. .. code-block:: python import sentry_sdk with sentry_sdk.use_scope(scope): scope.set_tag("color", "green") sentry_sdk.capture_message("hello") # will include `color` tag. sentry_sdk.capture_message("hello, again") # will NOT include `color` tag. """ # set given scope as current scope token = _current_scope.set(scope) try: yield scope finally: # restore original scope _current_scope.reset(token) @contextmanager def isolation_scope(): # type: () -> Generator[Scope, None, None] """ .. versionadded:: 2.0.0 Context manager that forks the current isolation scope and runs the wrapped code in it. The current scope is also forked to not bleed data into the existing current scope. After the wrapped code is executed, the original scopes are restored. Example Usage: .. code-block:: python import sentry_sdk with sentry_sdk.isolation_scope() as scope: scope.set_tag("color", "green") sentry_sdk.capture_message("hello") # will include `color` tag. sentry_sdk.capture_message("hello, again") # will NOT include `color` tag. 
""" # fork current scope current_scope = Scope.get_current_scope() forked_current_scope = current_scope.fork() current_token = _current_scope.set(forked_current_scope) # fork isolation scope isolation_scope = Scope.get_isolation_scope() new_isolation_scope = isolation_scope.fork() isolation_token = _isolation_scope.set(new_isolation_scope) try: yield new_isolation_scope finally: # restore original scopes _current_scope.reset(current_token) _isolation_scope.reset(isolation_token) @contextmanager def use_isolation_scope(isolation_scope): # type: (Scope) -> Generator[Scope, None, None] """ .. versionadded:: 2.0.0 Context manager that uses the given `isolation_scope` and runs the wrapped code in it. The current scope is also forked to not bleed data into the existing current scope. After the wrapped code is executed, the original scopes are restored. Example Usage: .. code-block:: python import sentry_sdk with sentry_sdk.isolation_scope() as scope: scope.set_tag("color", "green") sentry_sdk.capture_message("hello") # will include `color` tag. sentry_sdk.capture_message("hello, again") # will NOT include `color` tag. """ # fork current scope current_scope = Scope.get_current_scope() forked_current_scope = current_scope.fork() current_token = _current_scope.set(forked_current_scope) # set given scope as isolation scope isolation_token = _isolation_scope.set(isolation_scope) try: yield isolation_scope finally: # restore original scopes _current_scope.reset(current_token) _isolation_scope.reset(isolation_token) def should_send_default_pii(): # type: () -> bool """Shortcut for `Scope.get_client().should_send_default_pii()`.""" return Scope.get_client().should_send_default_pii() # Circular imports from sentry_sdk.client import NonRecordingClient if TYPE_CHECKING: import sentry_sdk.client sentry-python-2.18.0/sentry_sdk/scrubber.py000066400000000000000000000134461471214654000210270ustar00rootroot00000000000000from sentry_sdk.utils import ( capture_internal_exceptions, AnnotatedValue, iter_event_frames, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from sentry_sdk._types import Event from typing import List from typing import Optional DEFAULT_DENYLIST = [ # stolen from relay "password", "passwd", "secret", "api_key", "apikey", "auth", "credentials", "mysql_pwd", "privatekey", "private_key", "token", "session", # django "csrftoken", "sessionid", # wsgi "x_csrftoken", "x_forwarded_for", "set_cookie", "cookie", "authorization", "x_api_key", # other common names used in the wild "aiohttp_session", # aiohttp "connect.sid", # Express "csrf_token", # Pyramid "csrf", # (this is a cookie name used in accepted answers on stack overflow) "_csrf", # Express "_csrf_token", # Bottle "PHPSESSID", # PHP "_session", # Sanic "symfony", # Symfony "user_session", # Vue "_xsrf", # Tornado "XSRF-TOKEN", # Angular, Laravel ] DEFAULT_PII_DENYLIST = [ "x_forwarded_for", "x_real_ip", "ip_address", "remote_addr", ] class EventScrubber: def __init__( self, denylist=None, recursive=False, send_default_pii=False, pii_denylist=None ): # type: (Optional[List[str]], bool, bool, Optional[List[str]]) -> None """ A scrubber that goes through the event payload and removes sensitive data configured through denylists. :param denylist: A security denylist that is always scrubbed, defaults to DEFAULT_DENYLIST. :param recursive: Whether to scrub the event payload recursively, default False. :param send_default_pii: Whether pii is sending is on, pii fields are not scrubbed. 
:param pii_denylist: The denylist to use for scrubbing when pii is not sent, defaults to DEFAULT_PII_DENYLIST. """ self.denylist = DEFAULT_DENYLIST.copy() if denylist is None else denylist if not send_default_pii: pii_denylist = ( DEFAULT_PII_DENYLIST.copy() if pii_denylist is None else pii_denylist ) self.denylist += pii_denylist self.denylist = [x.lower() for x in self.denylist] self.recursive = recursive def scrub_list(self, lst): # type: (object) -> None """ If a list is passed to this method, the method recursively searches the list and any nested lists for any dictionaries. The method calls scrub_dict on all dictionaries it finds. If the parameter passed to this method is not a list, the method does nothing. """ if not isinstance(lst, list): return for v in lst: self.scrub_dict(v) # no-op unless v is a dict self.scrub_list(v) # no-op unless v is a list def scrub_dict(self, d): # type: (object) -> None """ If a dictionary is passed to this method, the method scrubs the dictionary of any sensitive data. The method calls itself recursively on any nested dictionaries ( including dictionaries nested in lists) if self.recursive is True. This method does nothing if the parameter passed to it is not a dictionary. """ if not isinstance(d, dict): return for k, v in d.items(): # The cast is needed because mypy is not smart enough to figure out that k must be a # string after the isinstance check. if isinstance(k, str) and k.lower() in self.denylist: d[k] = AnnotatedValue.substituted_because_contains_sensitive_data() elif self.recursive: self.scrub_dict(v) # no-op unless v is a dict self.scrub_list(v) # no-op unless v is a list def scrub_request(self, event): # type: (Event) -> None with capture_internal_exceptions(): if "request" in event: if "headers" in event["request"]: self.scrub_dict(event["request"]["headers"]) if "cookies" in event["request"]: self.scrub_dict(event["request"]["cookies"]) if "data" in event["request"]: self.scrub_dict(event["request"]["data"]) def scrub_extra(self, event): # type: (Event) -> None with capture_internal_exceptions(): if "extra" in event: self.scrub_dict(event["extra"]) def scrub_user(self, event): # type: (Event) -> None with capture_internal_exceptions(): if "user" in event: self.scrub_dict(event["user"]) def scrub_breadcrumbs(self, event): # type: (Event) -> None with capture_internal_exceptions(): if "breadcrumbs" in event: if "values" in event["breadcrumbs"]: for value in event["breadcrumbs"]["values"]: if "data" in value: self.scrub_dict(value["data"]) def scrub_frames(self, event): # type: (Event) -> None with capture_internal_exceptions(): for frame in iter_event_frames(event): if "vars" in frame: self.scrub_dict(frame["vars"]) def scrub_spans(self, event): # type: (Event) -> None with capture_internal_exceptions(): if "spans" in event: for span in event["spans"]: if "data" in span: self.scrub_dict(span["data"]) def scrub_event(self, event): # type: (Event) -> None self.scrub_request(event) self.scrub_extra(event) self.scrub_user(event) self.scrub_breadcrumbs(event) self.scrub_frames(event) self.scrub_spans(event) sentry-python-2.18.0/sentry_sdk/serializer.py000066400000000000000000000314371471214654000213710ustar00rootroot00000000000000import sys import math from collections.abc import Mapping, Sequence, Set from datetime import datetime from sentry_sdk.utils import ( AnnotatedValue, capture_internal_exception, disable_capture_event, format_timestamp, safe_repr, strip_string, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from types import 
TracebackType from typing import Any from typing import Callable from typing import ContextManager from typing import Dict from typing import List from typing import Optional from typing import Type from typing import Union from sentry_sdk._types import NotImplementedType Span = Dict[str, Any] ReprProcessor = Callable[[Any, Dict[str, Any]], Union[NotImplementedType, str]] Segment = Union[str, int] # Bytes are technically not strings in Python 3, but we can serialize them serializable_str_types = (str, bytes, bytearray, memoryview) # Maximum length of JSON-serialized event payloads that can be safely sent # before the server may reject the event due to its size. This is not intended # to reflect actual values defined server-side, but rather only be an upper # bound for events sent by the SDK. # # Can be overwritten if wanting to send more bytes, e.g. with a custom server. # When changing this, keep in mind that events may be a little bit larger than # this value due to attached metadata, so keep the number conservative. MAX_EVENT_BYTES = 10**6 # Maximum depth and breadth of databags. Excess data will be trimmed. If # max_request_body_size is "always", request bodies won't be trimmed. MAX_DATABAG_DEPTH = 5 MAX_DATABAG_BREADTH = 10 CYCLE_MARKER = "<cyclic>" global_repr_processors = [] # type: List[ReprProcessor] def add_global_repr_processor(processor): # type: (ReprProcessor) -> None global_repr_processors.append(processor) class Memo: __slots__ = ("_ids", "_objs") def __init__(self): # type: () -> None self._ids = {} # type: Dict[int, Any] self._objs = [] # type: List[Any] def memoize(self, obj): # type: (Any) -> ContextManager[bool] self._objs.append(obj) return self def __enter__(self): # type: () -> bool obj = self._objs[-1] if id(obj) in self._ids: return True else: self._ids[id(obj)] = obj return False def __exit__( self, ty, # type: Optional[Type[BaseException]] value, # type: Optional[BaseException] tb, # type: Optional[TracebackType] ): # type: (...) -> None self._ids.pop(id(self._objs.pop()), None) def serialize(event, **kwargs): # type: (Dict[str, Any], **Any) -> Dict[str, Any] """ A very smart serializer that takes a dict and emits a json-friendly dict. Currently used for serializing the final Event and also prematurely while fetching the stack local variables for each frame in a stacktrace. It works internally with 'databags' which are arbitrary data structures like Mapping, Sequence and Set. The algorithm itself is a recursive graph walk down the data structures it encounters. It has the following responsibilities: * Trimming databags and keeping them within MAX_DATABAG_BREADTH and MAX_DATABAG_DEPTH. * Calling safe_repr() on objects appropriately to keep them informative and readable in the final payload. * Annotating the payload with the _meta field whenever trimming happens. :param max_request_body_size: If set to "always", will never trim request bodies. :param max_value_length: The max length to strip strings to, defaults to sentry_sdk.consts.DEFAULT_MAX_VALUE_LENGTH :param is_vars: If we're serializing vars early, we want to repr() things that are JSON-serializable to make their type more apparent. For example, it's useful to see the difference between a unicode-string and a bytestring when viewing a stacktrace. :param custom_repr: A custom repr function that runs before safe_repr on the object to be serialized. If it returns None or throws internally, we will fall back to safe_repr.
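Example (an illustrative sketch of the trimming behavior):

.. code-block:: python

    from sentry_sdk.serializer import serialize

    event = {"extra": {"rows": list(range(1000))}}
    serialized = serialize(event)
    # "rows" is trimmed to MAX_DATABAG_BREADTH items, and the original
    # length is recorded in the event's "_meta" annotations.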
""" memo = Memo() path = [] # type: List[Segment] meta_stack = [] # type: List[Dict[str, Any]] keep_request_bodies = ( kwargs.pop("max_request_body_size", None) == "always" ) # type: bool max_value_length = kwargs.pop("max_value_length", None) # type: Optional[int] is_vars = kwargs.pop("is_vars", False) custom_repr = kwargs.pop("custom_repr", None) # type: Callable[..., Optional[str]] def _safe_repr_wrapper(value): # type: (Any) -> str try: repr_value = None if custom_repr is not None: repr_value = custom_repr(value) return repr_value or safe_repr(value) except Exception: return safe_repr(value) def _annotate(**meta): # type: (**Any) -> None while len(meta_stack) <= len(path): try: segment = path[len(meta_stack) - 1] node = meta_stack[-1].setdefault(str(segment), {}) except IndexError: node = {} meta_stack.append(node) meta_stack[-1].setdefault("", {}).update(meta) def _is_databag(): # type: () -> Optional[bool] """ A databag is any value that we need to trim. True for stuff like vars, request bodies, breadcrumbs and extra. :returns: `True` for "yes", `False` for :"no", `None` for "maybe soon". """ try: if is_vars: return True is_request_body = _is_request_body() if is_request_body in (True, None): return is_request_body p0 = path[0] if p0 == "breadcrumbs" and path[1] == "values": path[2] return True if p0 == "extra": return True except IndexError: return None return False def _is_request_body(): # type: () -> Optional[bool] try: if path[0] == "request" and path[1] == "data": return True except IndexError: return None return False def _serialize_node( obj, # type: Any is_databag=None, # type: Optional[bool] is_request_body=None, # type: Optional[bool] should_repr_strings=None, # type: Optional[bool] segment=None, # type: Optional[Segment] remaining_breadth=None, # type: Optional[Union[int, float]] remaining_depth=None, # type: Optional[Union[int, float]] ): # type: (...) 
-> Any if segment is not None: path.append(segment) try: with memo.memoize(obj) as result: if result: return CYCLE_MARKER return _serialize_node_impl( obj, is_databag=is_databag, is_request_body=is_request_body, should_repr_strings=should_repr_strings, remaining_depth=remaining_depth, remaining_breadth=remaining_breadth, ) except BaseException: capture_internal_exception(sys.exc_info()) if is_databag: return "<failed to serialize, use init(debug=True) to see error logs>" return None finally: if segment is not None: path.pop() del meta_stack[len(path) + 1 :] def _flatten_annotated(obj): # type: (Any) -> Any if isinstance(obj, AnnotatedValue): _annotate(**obj.metadata) obj = obj.value return obj def _serialize_node_impl( obj, is_databag, is_request_body, should_repr_strings, remaining_depth, remaining_breadth, ): # type: (Any, Optional[bool], Optional[bool], Optional[bool], Optional[Union[float, int]], Optional[Union[float, int]]) -> Any if isinstance(obj, AnnotatedValue): should_repr_strings = False if should_repr_strings is None: should_repr_strings = is_vars if is_databag is None: is_databag = _is_databag() if is_request_body is None: is_request_body = _is_request_body() if is_databag: if is_request_body and keep_request_bodies: remaining_depth = float("inf") remaining_breadth = float("inf") else: if remaining_depth is None: remaining_depth = MAX_DATABAG_DEPTH if remaining_breadth is None: remaining_breadth = MAX_DATABAG_BREADTH obj = _flatten_annotated(obj) if remaining_depth is not None and remaining_depth <= 0: _annotate(rem=[["!limit", "x"]]) if is_databag: return _flatten_annotated( strip_string(_safe_repr_wrapper(obj), max_length=max_value_length) ) return None if is_databag and global_repr_processors: hints = {"memo": memo, "remaining_depth": remaining_depth} for processor in global_repr_processors: result = processor(obj, hints) if result is not NotImplemented: return _flatten_annotated(result) sentry_repr = getattr(type(obj), "__sentry_repr__", None) if obj is None or isinstance(obj, (bool, int, float)): if should_repr_strings or ( isinstance(obj, float) and (math.isinf(obj) or math.isnan(obj)) ): return _safe_repr_wrapper(obj) else: return obj elif callable(sentry_repr): return sentry_repr(obj) elif isinstance(obj, datetime): return ( str(format_timestamp(obj)) if not should_repr_strings else _safe_repr_wrapper(obj) ) elif isinstance(obj, Mapping): # Create temporary copy here to avoid calling too much code that # might mutate our dictionary while we're still iterating over it.
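# Each key is stringified and its value serialized depth-first; once
# ``remaining_breadth`` entries have been emitted, the rest are dropped
# and the original length is recorded via ``_annotate``.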
obj = dict(obj.items()) rv_dict = {} # type: Dict[str, Any] i = 0 for k, v in obj.items(): if remaining_breadth is not None and i >= remaining_breadth: _annotate(len=len(obj)) break str_k = str(k) v = _serialize_node( v, segment=str_k, should_repr_strings=should_repr_strings, is_databag=is_databag, is_request_body=is_request_body, remaining_depth=( remaining_depth - 1 if remaining_depth is not None else None ), remaining_breadth=remaining_breadth, ) rv_dict[str_k] = v i += 1 return rv_dict elif not isinstance(obj, serializable_str_types) and isinstance( obj, (Set, Sequence) ): rv_list = [] for i, v in enumerate(obj): if remaining_breadth is not None and i >= remaining_breadth: _annotate(len=len(obj)) break rv_list.append( _serialize_node( v, segment=i, should_repr_strings=should_repr_strings, is_databag=is_databag, is_request_body=is_request_body, remaining_depth=( remaining_depth - 1 if remaining_depth is not None else None ), remaining_breadth=remaining_breadth, ) ) return rv_list if should_repr_strings: obj = _safe_repr_wrapper(obj) else: if isinstance(obj, bytes) or isinstance(obj, bytearray): obj = obj.decode("utf-8", "replace") if not isinstance(obj, str): obj = _safe_repr_wrapper(obj) is_span_description = ( len(path) == 3 and path[0] == "spans" and path[-1] == "description" ) if is_span_description: return obj return _flatten_annotated(strip_string(obj, max_length=max_value_length)) # # Start of serialize() function # disable_capture_event.set(True) try: serialized_event = _serialize_node(event, **kwargs) if not is_vars and meta_stack and isinstance(serialized_event, dict): serialized_event["_meta"] = meta_stack[0] return serialized_event finally: disable_capture_event.set(False) sentry-python-2.18.0/sentry_sdk/session.py000066400000000000000000000127031471214654000206760ustar00rootroot00000000000000import uuid from datetime import datetime, timezone from sentry_sdk.utils import format_timestamp from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Optional from typing import Union from typing import Any from typing import Dict from sentry_sdk._types import SessionStatus def _minute_trunc(ts): # type: (datetime) -> datetime return ts.replace(second=0, microsecond=0) def _make_uuid( val, # type: Union[str, uuid.UUID] ): # type: (...) -> uuid.UUID if isinstance(val, uuid.UUID): return val return uuid.UUID(val) class Session: def __init__( self, sid=None, # type: Optional[Union[str, uuid.UUID]] did=None, # type: Optional[str] timestamp=None, # type: Optional[datetime] started=None, # type: Optional[datetime] duration=None, # type: Optional[float] status=None, # type: Optional[SessionStatus] release=None, # type: Optional[str] environment=None, # type: Optional[str] user_agent=None, # type: Optional[str] ip_address=None, # type: Optional[str] errors=None, # type: Optional[int] user=None, # type: Optional[Any] session_mode="application", # type: str ): # type: (...) 
-> None if sid is None: sid = uuid.uuid4() if started is None: started = datetime.now(timezone.utc) if status is None: status = "ok" self.status = status self.did = None # type: Optional[str] self.started = started self.release = None # type: Optional[str] self.environment = None # type: Optional[str] self.duration = None # type: Optional[float] self.user_agent = None # type: Optional[str] self.ip_address = None # type: Optional[str] self.session_mode = session_mode # type: str self.errors = 0 self.update( sid=sid, did=did, timestamp=timestamp, duration=duration, release=release, environment=environment, user_agent=user_agent, ip_address=ip_address, errors=errors, user=user, ) @property def truncated_started(self): # type: (...) -> datetime return _minute_trunc(self.started) def update( self, sid=None, # type: Optional[Union[str, uuid.UUID]] did=None, # type: Optional[str] timestamp=None, # type: Optional[datetime] started=None, # type: Optional[datetime] duration=None, # type: Optional[float] status=None, # type: Optional[SessionStatus] release=None, # type: Optional[str] environment=None, # type: Optional[str] user_agent=None, # type: Optional[str] ip_address=None, # type: Optional[str] errors=None, # type: Optional[int] user=None, # type: Optional[Any] ): # type: (...) -> None # If a user is supplied we pull some data form it if user: if ip_address is None: ip_address = user.get("ip_address") if did is None: did = user.get("id") or user.get("email") or user.get("username") if sid is not None: self.sid = _make_uuid(sid) if did is not None: self.did = str(did) if timestamp is None: timestamp = datetime.now(timezone.utc) self.timestamp = timestamp if started is not None: self.started = started if duration is not None: self.duration = duration if release is not None: self.release = release if environment is not None: self.environment = environment if ip_address is not None: self.ip_address = ip_address if user_agent is not None: self.user_agent = user_agent if errors is not None: self.errors = errors if status is not None: self.status = status def close( self, status=None # type: Optional[SessionStatus] ): # type: (...) -> Any if status is None and self.status == "ok": status = "exited" if status is not None: self.update(status=status) def get_json_attrs( self, with_user_info=True # type: Optional[bool] ): # type: (...) -> Any attrs = {} if self.release is not None: attrs["release"] = self.release if self.environment is not None: attrs["environment"] = self.environment if with_user_info: if self.ip_address is not None: attrs["ip_address"] = self.ip_address if self.user_agent is not None: attrs["user_agent"] = self.user_agent return attrs def to_json(self): # type: (...) 
-> Any rv = { "sid": str(self.sid), "init": True, "started": format_timestamp(self.started), "timestamp": format_timestamp(self.timestamp), "status": self.status, } # type: Dict[str, Any] if self.errors: rv["errors"] = self.errors if self.did is not None: rv["did"] = self.did if self.duration is not None: rv["duration"] = self.duration attrs = self.get_json_attrs() if attrs: rv["attrs"] = attrs return rv sentry-python-2.18.0/sentry_sdk/sessions.py000066400000000000000000000217351471214654000210660ustar00rootroot00000000000000import os import time import warnings from threading import Thread, Lock from contextlib import contextmanager import sentry_sdk from sentry_sdk.envelope import Envelope from sentry_sdk.session import Session from sentry_sdk.utils import format_timestamp from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Callable from typing import Dict from typing import Generator from typing import List from typing import Optional from typing import Union def is_auto_session_tracking_enabled(hub=None): # type: (Optional[sentry_sdk.Hub]) -> Union[Any, bool, None] """DEPRECATED: Utility function to find out if session tracking is enabled.""" # Internal callers should use private _is_auto_session_tracking_enabled, instead. warnings.warn( "This function is deprecated and will be removed in the next major release. " "There is no public API replacement.", DeprecationWarning, stacklevel=2, ) if hub is None: hub = sentry_sdk.Hub.current should_track = hub.scope._force_auto_session_tracking if should_track is None: client_options = hub.client.options if hub.client else {} should_track = client_options.get("auto_session_tracking", False) return should_track @contextmanager def auto_session_tracking(hub=None, session_mode="application"): # type: (Optional[sentry_sdk.Hub], str) -> Generator[None, None, None] """DEPRECATED: Use track_session instead Starts and stops a session automatically around a block. """ warnings.warn( "This function is deprecated and will be removed in the next major release. " "Use track_session instead.", DeprecationWarning, stacklevel=2, ) if hub is None: hub = sentry_sdk.Hub.current with warnings.catch_warnings(): warnings.simplefilter("ignore", DeprecationWarning) should_track = is_auto_session_tracking_enabled(hub) if should_track: hub.start_session(session_mode=session_mode) try: yield finally: if should_track: hub.end_session() def is_auto_session_tracking_enabled_scope(scope): # type: (sentry_sdk.Scope) -> bool """ DEPRECATED: Utility function to find out if session tracking is enabled. """ warnings.warn( "This function is deprecated and will be removed in the next major release. " "There is no public API replacement.", DeprecationWarning, stacklevel=2, ) # Internal callers should use private _is_auto_session_tracking_enabled, instead. return _is_auto_session_tracking_enabled(scope) def _is_auto_session_tracking_enabled(scope): # type: (sentry_sdk.Scope) -> bool """ Utility function to find out if session tracking is enabled. """ should_track = scope._force_auto_session_tracking if should_track is None: client_options = sentry_sdk.get_client().options should_track = client_options.get("auto_session_tracking", False) return should_track @contextmanager def auto_session_tracking_scope(scope, session_mode="application"): # type: (sentry_sdk.Scope, str) -> Generator[None, None, None] """DEPRECATED: This function is a deprecated alias for track_session. Starts and stops a session automatically around a block. 
""" warnings.warn( "This function is a deprecated alias for track_session and will be removed in the next major release.", DeprecationWarning, stacklevel=2, ) with track_session(scope, session_mode=session_mode): yield @contextmanager def track_session(scope, session_mode="application"): # type: (sentry_sdk.Scope, str) -> Generator[None, None, None] """ Start a new session in the provided scope, assuming session tracking is enabled. This is a no-op context manager if session tracking is not enabled. """ should_track = _is_auto_session_tracking_enabled(scope) if should_track: scope.start_session(session_mode=session_mode) try: yield finally: if should_track: scope.end_session() TERMINAL_SESSION_STATES = ("exited", "abnormal", "crashed") MAX_ENVELOPE_ITEMS = 100 def make_aggregate_envelope(aggregate_states, attrs): # type: (Any, Any) -> Any return {"attrs": dict(attrs), "aggregates": list(aggregate_states.values())} class SessionFlusher: def __init__( self, capture_func, # type: Callable[[Envelope], None] flush_interval=60, # type: int ): # type: (...) -> None self.capture_func = capture_func self.flush_interval = flush_interval self.pending_sessions = [] # type: List[Any] self.pending_aggregates = {} # type: Dict[Any, Any] self._thread = None # type: Optional[Thread] self._thread_lock = Lock() self._aggregate_lock = Lock() self._thread_for_pid = None # type: Optional[int] self._running = True def flush(self): # type: (...) -> None pending_sessions = self.pending_sessions self.pending_sessions = [] with self._aggregate_lock: pending_aggregates = self.pending_aggregates self.pending_aggregates = {} envelope = Envelope() for session in pending_sessions: if len(envelope.items) == MAX_ENVELOPE_ITEMS: self.capture_func(envelope) envelope = Envelope() envelope.add_session(session) for attrs, states in pending_aggregates.items(): if len(envelope.items) == MAX_ENVELOPE_ITEMS: self.capture_func(envelope) envelope = Envelope() envelope.add_sessions(make_aggregate_envelope(states, attrs)) if len(envelope.items) > 0: self.capture_func(envelope) def _ensure_running(self): # type: (...) -> None """ Check that we have an active thread to run in, or create one if not. Note that this might fail (e.g. in Python 3.12 it's not possible to spawn new threads at interpreter shutdown). In that case self._running will be False after running this function. """ if self._thread_for_pid == os.getpid() and self._thread is not None: return None with self._thread_lock: if self._thread_for_pid == os.getpid() and self._thread is not None: return None def _thread(): # type: (...) -> None while self._running: time.sleep(self.flush_interval) if self._running: self.flush() thread = Thread(target=_thread) thread.daemon = True try: thread.start() except RuntimeError: # Unfortunately at this point the interpreter is in a state that no # longer allows us to spawn a thread and we have to bail. self._running = False return None self._thread = thread self._thread_for_pid = os.getpid() return None def add_aggregate_session( self, session # type: Session ): # type: (...) -> None # NOTE on `session.did`: # the protocol can deal with buckets that have a distinct-id, however # in practice we expect the python SDK to have an extremely high cardinality # here, effectively making aggregation useless, therefore we do not # aggregate per-did. 
# For this part we can get away with using the global interpreter lock with self._aggregate_lock: attrs = session.get_json_attrs(with_user_info=False) primary_key = tuple(sorted(attrs.items())) secondary_key = session.truncated_started # (, session.did) states = self.pending_aggregates.setdefault(primary_key, {}) state = states.setdefault(secondary_key, {}) if "started" not in state: state["started"] = format_timestamp(session.truncated_started) # if session.did is not None: # state["did"] = session.did if session.status == "crashed": state["crashed"] = state.get("crashed", 0) + 1 elif session.status == "abnormal": state["abnormal"] = state.get("abnormal", 0) + 1 elif session.errors > 0: state["errored"] = state.get("errored", 0) + 1 else: state["exited"] = state.get("exited", 0) + 1 def add_session( self, session # type: Session ): # type: (...) -> None if session.session_mode == "request": self.add_aggregate_session(session) else: self.pending_sessions.append(session.to_json()) self._ensure_running() def kill(self): # type: (...) -> None self._running = False def __del__(self): # type: (...) -> None self.kill() sentry-python-2.18.0/sentry_sdk/spotlight.py000066400000000000000000000074711471214654000212360ustar00rootroot00000000000000import io import os import urllib.parse import urllib.request import urllib.error import urllib3 from itertools import chain from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Callable from typing import Dict from typing import Optional from sentry_sdk.utils import logger, env_to_bool, capture_internal_exceptions from sentry_sdk.envelope import Envelope DEFAULT_SPOTLIGHT_URL = "http://localhost:8969/stream" DJANGO_SPOTLIGHT_MIDDLEWARE_PATH = "sentry_sdk.spotlight.SpotlightMiddleware" class SpotlightClient: def __init__(self, url): # type: (str) -> None self.url = url self.http = urllib3.PoolManager() self.tries = 0 def capture_envelope(self, envelope): # type: (Envelope) -> None if self.tries > 3: logger.warning( "Too many errors sending to Spotlight, stop sending events there." 
) return body = io.BytesIO() envelope.serialize_into(body) try: req = self.http.request( url=self.url, body=body.getvalue(), method="POST", headers={ "Content-Type": "application/x-sentry-envelope", }, ) req.close() except Exception as e: self.tries += 1 logger.warning(str(e)) try: from django.http import HttpResponseServerError from django.conf import settings class SpotlightMiddleware: def __init__(self, get_response): # type: (Any, Callable[..., Any]) -> None self.get_response = get_response def __call__(self, request): # type: (Any, Any) -> Any return self.get_response(request) def process_exception(self, _request, exception): # type: (Any, Any, Exception) -> Optional[HttpResponseServerError] if not settings.DEBUG: return None import sentry_sdk.api spotlight_client = sentry_sdk.api.get_client().spotlight if spotlight_client is None: return None # Spotlight URL has a trailing `/stream` part at the end so split it off spotlight_url = spotlight_client.url.rsplit("/", 1)[0] try: spotlight = urllib.request.urlopen(spotlight_url).read().decode("utf-8") except urllib.error.URLError: return None else: event_id = sentry_sdk.api.capture_exception(exception) return HttpResponseServerError( spotlight.replace( "<html>", ( f'<html><base href="{spotlight_url}">' '<script>window.__spotlight = {{ initOptions: {{ startFrom: "/errors/{event_id}" }}}};</script>'.format( event_id=event_id ) ), ) ) except ImportError: settings = None def setup_spotlight(options): # type: (Dict[str, Any]) -> Optional[SpotlightClient] url = options.get("spotlight") if isinstance(url, str): pass elif url is True: url = DEFAULT_SPOTLIGHT_URL else: return None if ( settings is not None and settings.DEBUG and env_to_bool(os.environ.get("SENTRY_SPOTLIGHT_ON_ERROR", "1")) ): with capture_internal_exceptions(): middleware = settings.MIDDLEWARE if DJANGO_SPOTLIGHT_MIDDLEWARE_PATH not in middleware: settings.MIDDLEWARE = type(middleware)( chain(middleware, (DJANGO_SPOTLIGHT_MIDDLEWARE_PATH,)) ) return SpotlightClient(url) sentry-python-2.18.0/sentry_sdk/tracing.py000066400000000000000000001314671471214654000206510ustar00rootroot00000000000000import uuid import random import warnings from datetime import datetime, timedelta, timezone import sentry_sdk from sentry_sdk.consts import INSTRUMENTER, SPANSTATUS, SPANDATA from sentry_sdk.profiler.continuous_profiler import get_profiler_id from sentry_sdk.utils import ( get_current_thread_meta, is_valid_sample_rate, logger, nanosecond_time, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from collections.abc import Callable, Mapping, MutableMapping from typing import Any from typing import Dict from typing import Iterator from typing import List from typing import Optional from typing import overload from typing import ParamSpec from typing import Tuple from typing import Union from typing import TypeVar from typing_extensions import TypedDict, Unpack P = ParamSpec("P") R = TypeVar("R") import sentry_sdk.profiler from sentry_sdk._types import ( Event, MeasurementUnit, SamplingContext, MeasurementValue, ) class SpanKwargs(TypedDict, total=False): trace_id: str """ The trace ID of the root span. If this new span is to be the root span, omit this parameter, and a new trace ID will be generated. """ span_id: str """The span ID of this span. If omitted, a new span ID will be generated.""" parent_span_id: str """The span ID of the parent span, if applicable.""" same_process_as_parent: bool """Whether this span is in the same process as the parent span.""" sampled: bool """ Whether the span should be sampled. Overrides the default sampling decision for this span when provided. """ op: str """ The span's operation.
A list of recommended values is available here: https://develop.sentry.dev/sdk/performance/span-operations/ """ description: str """A description of what operation is being performed within the span. This argument is DEPRECATED. Please use the `name` parameter, instead.""" hub: Optional["sentry_sdk.Hub"] """The hub to use for this span. This argument is DEPRECATED. Please use the `scope` parameter, instead.""" status: str """The span's status. Possible values are listed at https://develop.sentry.dev/sdk/event-payloads/span/""" containing_transaction: Optional["Transaction"] """The transaction that this span belongs to.""" start_timestamp: Optional[Union[datetime, float]] """ The timestamp when the span started. If omitted, the current time will be used. """ scope: "sentry_sdk.Scope" """The scope to use for this span. If not provided, we use the current scope.""" origin: str """ The origin of the span. See https://develop.sentry.dev/sdk/performance/trace-origin/ Default "manual". """ name: str """A string describing what operation is being performed within the span/transaction.""" class TransactionKwargs(SpanKwargs, total=False): source: str """ A string describing the source of the transaction name. This will be used to determine the transaction's type. See https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations for more information. Default "custom". """ parent_sampled: bool """Whether the parent transaction was sampled. If True this transaction will be kept, if False it will be discarded.""" baggage: "Baggage" """The W3C baggage header value. (see https://www.w3.org/TR/baggage/)""" ProfileContext = TypedDict( "ProfileContext", { "profiler_id": str, }, ) BAGGAGE_HEADER_NAME = "baggage" SENTRY_TRACE_HEADER_NAME = "sentry-trace" # Transaction source # see https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations TRANSACTION_SOURCE_CUSTOM = "custom" TRANSACTION_SOURCE_URL = "url" TRANSACTION_SOURCE_ROUTE = "route" TRANSACTION_SOURCE_VIEW = "view" TRANSACTION_SOURCE_COMPONENT = "component" TRANSACTION_SOURCE_TASK = "task" # These are typically high cardinality and the server hates them LOW_QUALITY_TRANSACTION_SOURCES = [ TRANSACTION_SOURCE_URL, ] SOURCE_FOR_STYLE = { "endpoint": TRANSACTION_SOURCE_COMPONENT, "function_name": TRANSACTION_SOURCE_COMPONENT, "handler_name": TRANSACTION_SOURCE_COMPONENT, "method_and_path_pattern": TRANSACTION_SOURCE_ROUTE, "path": TRANSACTION_SOURCE_URL, "route_name": TRANSACTION_SOURCE_COMPONENT, "route_pattern": TRANSACTION_SOURCE_ROUTE, "uri_template": TRANSACTION_SOURCE_ROUTE, "url": TRANSACTION_SOURCE_ROUTE, } def get_span_status_from_http_code(http_status_code): # type: (int) -> str """ Returns the Sentry status corresponding to the given HTTP status code. 
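For example, ``404`` maps to ``SPANSTATUS.NOT_FOUND``, ``429`` to
``SPANSTATUS.RESOURCE_EXHAUSTED``, and ``503`` to ``SPANSTATUS.UNAVAILABLE``.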
See: https://develop.sentry.dev/sdk/event-payloads/contexts/#trace-context """ if http_status_code < 400: return SPANSTATUS.OK elif 400 <= http_status_code < 500: if http_status_code == 403: return SPANSTATUS.PERMISSION_DENIED elif http_status_code == 404: return SPANSTATUS.NOT_FOUND elif http_status_code == 429: return SPANSTATUS.RESOURCE_EXHAUSTED elif http_status_code == 413: return SPANSTATUS.FAILED_PRECONDITION elif http_status_code == 401: return SPANSTATUS.UNAUTHENTICATED elif http_status_code == 409: return SPANSTATUS.ALREADY_EXISTS else: return SPANSTATUS.INVALID_ARGUMENT elif 500 <= http_status_code < 600: if http_status_code == 504: return SPANSTATUS.DEADLINE_EXCEEDED elif http_status_code == 501: return SPANSTATUS.UNIMPLEMENTED elif http_status_code == 503: return SPANSTATUS.UNAVAILABLE else: return SPANSTATUS.INTERNAL_ERROR return SPANSTATUS.UNKNOWN_ERROR class _SpanRecorder: """Limits the number of spans recorded in a transaction.""" __slots__ = ("maxlen", "spans") def __init__(self, maxlen): # type: (int) -> None # FIXME: this is `maxlen - 1` only to preserve historical behavior # enforced by tests. # Either this should be changed to `maxlen` or the JS SDK implementation # should be changed to match a consistent interpretation of what maxlen # limits: either transaction+spans or only child spans. self.maxlen = maxlen - 1 self.spans = [] # type: List[Span] def add(self, span): # type: (Span) -> None if len(self.spans) > self.maxlen: span._span_recorder = None else: self.spans.append(span) class Span: """A span holds timing information of a block of code. Spans can have multiple child spans thus forming a span tree. :param trace_id: The trace ID of the root span. If this new span is to be the root span, omit this parameter, and a new trace ID will be generated. :param span_id: The span ID of this span. If omitted, a new span ID will be generated. :param parent_span_id: The span ID of the parent span, if applicable. :param same_process_as_parent: Whether this span is in the same process as the parent span. :param sampled: Whether the span should be sampled. Overrides the default sampling decision for this span when provided. :param op: The span's operation. A list of recommended values is available here: https://develop.sentry.dev/sdk/performance/span-operations/ :param description: A description of what operation is being performed within the span. .. deprecated:: 2.15.0 Please use the `name` parameter, instead. :param name: A string describing what operation is being performed within the span. :param hub: The hub to use for this span. .. deprecated:: 2.0.0 Please use the `scope` parameter, instead. :param status: The span's status. Possible values are listed at https://develop.sentry.dev/sdk/event-payloads/span/ :param containing_transaction: The transaction that this span belongs to. :param start_timestamp: The timestamp when the span started. If omitted, the current time will be used. :param scope: The scope to use for this span. If not provided, we use the current scope. 
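Example Usage (a minimal sketch; spans are usually created via
:py:meth:`start_child` or :py:meth:`sentry_sdk.start_span` rather than
instantiated directly):

.. code-block:: python

    import sentry_sdk

    with sentry_sdk.start_transaction(name="checkout"):
        with sentry_sdk.start_span(op="db.query", name="SELECT ...") as span:
            span.set_data("db.rows_returned", 42)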
""" __slots__ = ( "trace_id", "span_id", "parent_span_id", "same_process_as_parent", "sampled", "op", "description", "_measurements", "start_timestamp", "_start_timestamp_monotonic_ns", "status", "timestamp", "_tags", "_data", "_span_recorder", "hub", "_context_manager_state", "_containing_transaction", "_local_aggregator", "scope", "origin", "name", ) def __init__( self, trace_id=None, # type: Optional[str] span_id=None, # type: Optional[str] parent_span_id=None, # type: Optional[str] same_process_as_parent=True, # type: bool sampled=None, # type: Optional[bool] op=None, # type: Optional[str] description=None, # type: Optional[str] hub=None, # type: Optional[sentry_sdk.Hub] # deprecated status=None, # type: Optional[str] containing_transaction=None, # type: Optional[Transaction] start_timestamp=None, # type: Optional[Union[datetime, float]] scope=None, # type: Optional[sentry_sdk.Scope] origin="manual", # type: str name=None, # type: Optional[str] ): # type: (...) -> None self.trace_id = trace_id or uuid.uuid4().hex self.span_id = span_id or uuid.uuid4().hex[16:] self.parent_span_id = parent_span_id self.same_process_as_parent = same_process_as_parent self.sampled = sampled self.op = op self.description = name or description self.status = status self.hub = hub # backwards compatibility self.scope = scope self.origin = origin self._measurements = {} # type: Dict[str, MeasurementValue] self._tags = {} # type: MutableMapping[str, str] self._data = {} # type: Dict[str, Any] self._containing_transaction = containing_transaction if hub is not None: warnings.warn( "The `hub` parameter is deprecated. Please use `scope` instead.", DeprecationWarning, stacklevel=2, ) self.scope = self.scope or hub.scope if start_timestamp is None: start_timestamp = datetime.now(timezone.utc) elif isinstance(start_timestamp, float): start_timestamp = datetime.fromtimestamp(start_timestamp, timezone.utc) self.start_timestamp = start_timestamp try: # profiling depends on this value and requires that # it is measured in nanoseconds self._start_timestamp_monotonic_ns = nanosecond_time() except AttributeError: pass #: End timestamp of span self.timestamp = None # type: Optional[datetime] self._span_recorder = None # type: Optional[_SpanRecorder] self._local_aggregator = None # type: Optional[LocalAggregator] self.update_active_thread() self.set_profiler_id(get_profiler_id()) # TODO this should really live on the Transaction class rather than the Span # class def init_span_recorder(self, maxlen): # type: (int) -> None if self._span_recorder is None: self._span_recorder = _SpanRecorder(maxlen) def _get_local_aggregator(self): # type: (...) 
-> LocalAggregator rv = self._local_aggregator if rv is None: rv = self._local_aggregator = LocalAggregator() return rv def __repr__(self): # type: () -> str return ( "<%s(op=%r, description:%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r, origin=%r)>" % ( self.__class__.__name__, self.op, self.description, self.trace_id, self.span_id, self.parent_span_id, self.sampled, self.origin, ) ) def __enter__(self): # type: () -> Span scope = self.scope or sentry_sdk.get_current_scope() old_span = scope.span scope.span = self self._context_manager_state = (scope, old_span) return self def __exit__(self, ty, value, tb): # type: (Optional[Any], Optional[Any], Optional[Any]) -> None if value is not None: self.set_status(SPANSTATUS.INTERNAL_ERROR) scope, old_span = self._context_manager_state del self._context_manager_state self.finish(scope) scope.span = old_span @property def containing_transaction(self): # type: () -> Optional[Transaction] """The ``Transaction`` that this span belongs to. The ``Transaction`` is the root of the span tree, so one could also think of this ``Transaction`` as the "root span".""" # this is a getter rather than a regular attribute so that transactions # can return `self` here instead (as a way to prevent them circularly # referencing themselves) return self._containing_transaction def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): # type: (str, **Any) -> Span """ Start a sub-span from the current span or transaction. Takes the same arguments as the initializer of :py:class:`Span`. The trace id, sampling decision, transaction pointer, and span recorder are inherited from the current span/transaction. The instrumenter parameter is deprecated for user code, and it will be removed in the next major version. Going forward, it should only be used by the SDK itself. """ if kwargs.get("description") is not None: warnings.warn( "The `description` parameter is deprecated. Please use `name` instead.", DeprecationWarning, stacklevel=2, ) configuration_instrumenter = sentry_sdk.get_client().options["instrumenter"] if instrumenter != configuration_instrumenter: return NoOpSpan() kwargs.setdefault("sampled", self.sampled) child = Span( trace_id=self.trace_id, parent_span_id=self.span_id, containing_transaction=self.containing_transaction, **kwargs, ) span_recorder = ( self.containing_transaction and self.containing_transaction._span_recorder ) if span_recorder: span_recorder.add(child) return child @classmethod def continue_from_environ( cls, environ, # type: Mapping[str, str] **kwargs, # type: Any ): # type: (...) -> Transaction """ Create a Transaction with the given params, then add in data pulled from the ``sentry-trace`` and ``baggage`` headers from the environ (if any) before returning the Transaction. This is different from :py:meth:`~sentry_sdk.tracing.Span.continue_from_headers` in that it assumes header names in the form ``HTTP_HEADER_NAME`` - such as you would get from a WSGI/ASGI environ - rather than the form ``header-name``. :param environ: The ASGI/WSGI environ to pull information from. """ if cls is Span: logger.warning( "Deprecated: use Transaction.continue_from_environ " "instead of Span.continue_from_environ." ) return Transaction.continue_from_headers(EnvironHeaders(environ), **kwargs) @classmethod def continue_from_headers( cls, headers, # type: Mapping[str, str] **kwargs, # type: Any ): # type: (...) -> Transaction """ Create a transaction with the given params (including any data pulled from the ``sentry-trace`` and ``baggage`` headers). 
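Example Usage (``incoming_headers`` stands in for any mapping of incoming
HTTP headers, e.g. a framework request's header dict):

.. code-block:: python

    transaction = Transaction.continue_from_headers(
        incoming_headers, op="http.server", name="GET /index"
    )
    with sentry_sdk.start_transaction(transaction):
        ...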
:param headers: The dictionary with the HTTP headers to pull information from. """ # TODO move this to the Transaction class if cls is Span: logger.warning( "Deprecated: use Transaction.continue_from_headers " "instead of Span.continue_from_headers." ) # TODO-neel move away from this kwargs stuff, it's confusing and opaque # make more explicit baggage = Baggage.from_incoming_header(headers.get(BAGGAGE_HEADER_NAME)) kwargs.update({BAGGAGE_HEADER_NAME: baggage}) sentrytrace_kwargs = extract_sentrytrace_data( headers.get(SENTRY_TRACE_HEADER_NAME) ) if sentrytrace_kwargs is not None: kwargs.update(sentrytrace_kwargs) # If there's an incoming sentry-trace but no incoming baggage header, # for instance in traces coming from older SDKs, # baggage will be empty and immutable and won't be populated as head SDK. baggage.freeze() transaction = Transaction(**kwargs) transaction.same_process_as_parent = False return transaction def iter_headers(self): # type: () -> Iterator[Tuple[str, str]] """ Creates a generator which returns the span's ``sentry-trace`` and ``baggage`` headers. If the span's containing transaction doesn't yet have a ``baggage`` value, this will cause one to be generated and stored. """ if not self.containing_transaction: # Do not propagate headers if there is no containing transaction. Otherwise, this # span ends up being the root span of a new trace, and since it does not get sent # to Sentry, the trace will be missing a root transaction. The dynamic sampling # context will also be missing, breaking dynamic sampling & traces. return yield SENTRY_TRACE_HEADER_NAME, self.to_traceparent() baggage = self.containing_transaction.get_baggage().serialize() if baggage: yield BAGGAGE_HEADER_NAME, baggage @classmethod def from_traceparent( cls, traceparent, # type: Optional[str] **kwargs, # type: Any ): # type: (...) -> Optional[Transaction] """ DEPRECATED: Use :py:meth:`sentry_sdk.tracing.Span.continue_from_headers`. Create a ``Transaction`` with the given params, then add in data pulled from the given ``sentry-trace`` header value before returning the ``Transaction``. """ logger.warning( "Deprecated: Use Transaction.continue_from_headers(headers, **kwargs) " "instead of from_traceparent(traceparent, **kwargs)" ) if not traceparent: return None return cls.continue_from_headers( {SENTRY_TRACE_HEADER_NAME: traceparent}, **kwargs ) def to_traceparent(self): # type: () -> str if self.sampled is True: sampled = "1" elif self.sampled is False: sampled = "0" else: sampled = None traceparent = "%s-%s" % (self.trace_id, self.span_id) if sampled is not None: traceparent += "-%s" % (sampled,) return traceparent def to_baggage(self): # type: () -> Optional[Baggage] """Returns the :py:class:`~sentry_sdk.tracing_utils.Baggage` associated with this ``Span``, if any. (Taken from the root of the span tree.) 
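# --- Editor's example (illustrative sketch, not part of the SDK source).
# Propagating a trace across two services with `iter_headers()` and
# `Transaction.continue_from_headers` from above. Names are made up; assumes
# both services initialized the SDK with tracing enabled.
import sentry_sdk
from sentry_sdk.tracing import Transaction

# Service A: attach tracing headers to an outgoing request.
with sentry_sdk.start_transaction(op="http.server", name="GET /checkout") as txn:
    outgoing_headers = dict(txn.iter_headers())
    # outgoing_headers now holds "sentry-trace" and, once generated, "baggage".

# Service B: continue the same trace from the incoming headers.
transaction = Transaction.continue_from_headers(
    outgoing_headers, op="queue.task", name="fulfill-order"
)
with sentry_sdk.start_transaction(transaction):
    pass  # spans created here share the upstream trace_id
# --- end example ---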
""" if self.containing_transaction: return self.containing_transaction.get_baggage() return None def set_tag(self, key, value): # type: (str, Any) -> None self._tags[key] = value def set_data(self, key, value): # type: (str, Any) -> None self._data[key] = value def set_status(self, value): # type: (str) -> None self.status = value def set_measurement(self, name, value, unit=""): # type: (str, float, MeasurementUnit) -> None self._measurements[name] = {"value": value, "unit": unit} def set_thread(self, thread_id, thread_name): # type: (Optional[int], Optional[str]) -> None if thread_id is not None: self.set_data(SPANDATA.THREAD_ID, str(thread_id)) if thread_name is not None: self.set_data(SPANDATA.THREAD_NAME, thread_name) def set_profiler_id(self, profiler_id): # type: (Optional[str]) -> None if profiler_id is not None: self.set_data(SPANDATA.PROFILER_ID, profiler_id) def set_http_status(self, http_status): # type: (int) -> None self.set_tag( "http.status_code", str(http_status) ) # we keep this for backwards compatibility self.set_data(SPANDATA.HTTP_STATUS_CODE, http_status) self.set_status(get_span_status_from_http_code(http_status)) def is_success(self): # type: () -> bool return self.status == "ok" def finish(self, scope=None, end_timestamp=None): # type: (Optional[sentry_sdk.Scope], Optional[Union[float, datetime]]) -> Optional[str] """ Sets the end timestamp of the span. Additionally it also creates a breadcrumb from the span, if the span represents a database or HTTP request. :param scope: The scope to use for this transaction. If not provided, the current scope will be used. :param end_timestamp: Optional timestamp that should be used as timestamp instead of the current time. :return: Always ``None``. The type is ``Optional[str]`` to match the return value of :py:meth:`sentry_sdk.tracing.Transaction.finish`. """ if self.timestamp is not None: # This span is already finished, ignore. 
return None try: if end_timestamp: if isinstance(end_timestamp, float): end_timestamp = datetime.fromtimestamp(end_timestamp, timezone.utc) self.timestamp = end_timestamp else: elapsed = nanosecond_time() - self._start_timestamp_monotonic_ns self.timestamp = self.start_timestamp + timedelta( microseconds=elapsed / 1000 ) except AttributeError: self.timestamp = datetime.now(timezone.utc) scope = scope or sentry_sdk.get_current_scope() maybe_create_breadcrumbs_from_span(scope, self) return None def to_json(self): # type: () -> Dict[str, Any] """Returns a JSON-compatible representation of the span.""" rv = { "trace_id": self.trace_id, "span_id": self.span_id, "parent_span_id": self.parent_span_id, "same_process_as_parent": self.same_process_as_parent, "op": self.op, "description": self.description, "start_timestamp": self.start_timestamp, "timestamp": self.timestamp, "origin": self.origin, } # type: Dict[str, Any] if self.status: self._tags["status"] = self.status if self._local_aggregator is not None: metrics_summary = self._local_aggregator.to_json() if metrics_summary: rv["_metrics_summary"] = metrics_summary if len(self._measurements) > 0: rv["measurements"] = self._measurements tags = self._tags if tags: rv["tags"] = tags data = self._data if data: rv["data"] = data return rv def get_trace_context(self): # type: () -> Any rv = { "trace_id": self.trace_id, "span_id": self.span_id, "parent_span_id": self.parent_span_id, "op": self.op, "description": self.description, "origin": self.origin, } # type: Dict[str, Any] if self.status: rv["status"] = self.status if self.containing_transaction: rv["dynamic_sampling_context"] = ( self.containing_transaction.get_baggage().dynamic_sampling_context() ) data = {} thread_id = self._data.get(SPANDATA.THREAD_ID) if thread_id is not None: data["thread.id"] = thread_id thread_name = self._data.get(SPANDATA.THREAD_NAME) if thread_name is not None: data["thread.name"] = thread_name if data: rv["data"] = data return rv def get_profile_context(self): # type: () -> Optional[ProfileContext] profiler_id = self._data.get(SPANDATA.PROFILER_ID) if profiler_id is None: return None return { "profiler_id": profiler_id, } def update_active_thread(self): # type: () -> None thread_id, thread_name = get_current_thread_meta() self.set_thread(thread_id, thread_name) class Transaction(Span): """The Transaction is the root element that holds all the spans for Sentry performance instrumentation. :param name: Identifier of the transaction. Will show up in the Sentry UI. :param parent_sampled: Whether the parent transaction was sampled. If True this transaction will be kept, if False it will be discarded. :param baggage: The W3C baggage header value. (see https://www.w3.org/TR/baggage/) :param source: A string describing the source of the transaction name. This will be used to determine the transaction's type. See https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations for more information. Default "custom". :param kwargs: Additional arguments to be passed to the Span constructor. See :py:class:`sentry_sdk.tracing.Span` for available arguments. 
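# --- Editor's example (illustrative sketch, not part of the SDK source).
# `finish()` accepts an explicit `end_timestamp` as either a Unix-epoch float
# (converted via `datetime.fromtimestamp(..., timezone.utc)` above) or an
# aware `datetime`. Assumes an SDK initialized with tracing enabled.
import time
import sentry_sdk

with sentry_sdk.start_transaction(op="task", name="import-batch") as txn:
    span = txn.start_child(op="function", name="parse-file")
    rows = ["a", "b"]  # stand-in for real work
    span.finish(end_timestamp=time.time())  # pin the end time explicitly
# --- end example ---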
""" __slots__ = ( "name", "source", "parent_sampled", # used to create baggage value for head SDKs in dynamic sampling "sample_rate", "_measurements", "_contexts", "_profile", "_baggage", ) def __init__( # type: ignore[misc] self, name="", # type: str parent_sampled=None, # type: Optional[bool] baggage=None, # type: Optional[Baggage] source=TRANSACTION_SOURCE_CUSTOM, # type: str **kwargs, # type: Unpack[SpanKwargs] ): # type: (...) -> None super().__init__(**kwargs) self.name = name self.source = source self.sample_rate = None # type: Optional[float] self.parent_sampled = parent_sampled self._measurements = {} # type: Dict[str, MeasurementValue] self._contexts = {} # type: Dict[str, Any] self._profile = ( None ) # type: Optional[sentry_sdk.profiler.transaction_profiler.Profile] self._baggage = baggage def __repr__(self): # type: () -> str return ( "<%s(name=%r, op=%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r, source=%r, origin=%r)>" % ( self.__class__.__name__, self.name, self.op, self.trace_id, self.span_id, self.parent_span_id, self.sampled, self.source, self.origin, ) ) def _possibly_started(self): # type: () -> bool """Returns whether the transaction might have been started. If this returns False, we know that the transaction was not started with sentry_sdk.start_transaction, and therefore the transaction will be discarded. """ # We must explicitly check self.sampled is False since self.sampled can be None return self._span_recorder is not None or self.sampled is False def __enter__(self): # type: () -> Transaction if not self._possibly_started(): logger.debug( "Transaction was entered without being started with sentry_sdk.start_transaction." "The transaction will not be sent to Sentry. To fix, start the transaction by" "passing it to sentry_sdk.start_transaction." ) super().__enter__() if self._profile is not None: self._profile.__enter__() return self def __exit__(self, ty, value, tb): # type: (Optional[Any], Optional[Any], Optional[Any]) -> None if self._profile is not None: self._profile.__exit__(ty, value, tb) super().__exit__(ty, value, tb) @property def containing_transaction(self): # type: () -> Transaction """The root element of the span tree. In the case of a transaction it is the transaction itself. """ # Transactions (as spans) belong to themselves (as transactions). This # is a getter rather than a regular attribute to avoid having a circular # reference. return self def _get_scope_from_finish_args( self, scope_arg, # type: Optional[Union[sentry_sdk.Scope, sentry_sdk.Hub]] hub_arg, # type: Optional[Union[sentry_sdk.Scope, sentry_sdk.Hub]] ): # type: (...) -> Optional[sentry_sdk.Scope] """ Logic to get the scope from the arguments passed to finish. This function exists for backwards compatibility with the old finish. TODO: Remove this function in the next major version. """ scope_or_hub = scope_arg if hub_arg is not None: warnings.warn( "The `hub` parameter is deprecated. Please use the `scope` parameter, instead.", DeprecationWarning, stacklevel=3, ) scope_or_hub = hub_arg if isinstance(scope_or_hub, sentry_sdk.Hub): warnings.warn( "Passing a Hub to finish is deprecated. Please pass a Scope, instead.", DeprecationWarning, stacklevel=3, ) return scope_or_hub.scope return scope_or_hub def finish( self, scope=None, # type: Optional[sentry_sdk.Scope] end_timestamp=None, # type: Optional[Union[float, datetime]] *, hub=None, # type: Optional[sentry_sdk.Hub] ): # type: (...) -> Optional[str] """Finishes the transaction and sends it to Sentry. 
All finished spans in the transaction will also be sent to Sentry. :param scope: The Scope to use for this transaction. If not provided, the current Scope will be used. :param end_timestamp: Optional timestamp that should be used as timestamp instead of the current time. :param hub: The hub to use for this transaction. This argument is DEPRECATED. Please use the `scope` parameter, instead. :return: The event ID if the transaction was sent to Sentry, otherwise None. """ if self.timestamp is not None: # This transaction is already finished, ignore. return None # For backwards compatibility, we must handle the case where `scope` # or `hub` could both either be a `Scope` or a `Hub`. scope = self._get_scope_from_finish_args( scope, hub ) # type: Optional[sentry_sdk.Scope] scope = scope or self.scope or sentry_sdk.get_current_scope() client = sentry_sdk.get_client() if not client.is_active(): # We have no active client and therefore nowhere to send this transaction. return None if self._span_recorder is None: # Explicit check against False needed because self.sampled might be None if self.sampled is False: logger.debug("Discarding transaction because sampled = False") else: logger.debug( "Discarding transaction because it was not started with sentry_sdk.start_transaction" ) # This is not entirely accurate because discards here are not # exclusively based on sample rate but also traces sampler, but # we handle this the same here. if client.transport and has_tracing_enabled(client.options): if client.monitor and client.monitor.downsample_factor > 0: reason = "backpressure" else: reason = "sample_rate" client.transport.record_lost_event(reason, data_category="transaction") # Only one span (the transaction itself) is discarded, since we did not record any spans here. client.transport.record_lost_event(reason, data_category="span") return None if not self.name: logger.warning( "Transaction has no name, falling back to ``." ) self.name = "" super().finish(scope, end_timestamp) if not self.sampled: # At this point a `sampled = None` should have already been resolved # to a concrete decision. if self.sampled is None: logger.warning("Discarding transaction without sampling decision.") return None finished_spans = [ span.to_json() for span in self._span_recorder.spans if span.timestamp is not None ] # we do this to break the circular reference of transaction -> span # recorder -> span -> containing transaction (which is where we started) # before either the spans or the transaction goes out of scope and has # to be garbage collected self._span_recorder = None contexts = {} contexts.update(self._contexts) contexts.update({"trace": self.get_trace_context()}) profile_context = self.get_profile_context() if profile_context is not None: contexts.update({"profile": profile_context}) event = { "type": "transaction", "transaction": self.name, "transaction_info": {"source": self.source}, "contexts": contexts, "tags": self._tags, "timestamp": self.timestamp, "start_timestamp": self.start_timestamp, "spans": finished_spans, } # type: Event if self._profile is not None and self._profile.valid(): event["profile"] = self._profile self._profile = None event["measurements"] = self._measurements # This is here since `to_json` is not invoked. This really should # be gone when we switch to onlyspans. 
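# --- Editor's note (illustrative sketch, not part of the SDK source).
# Rough shape of the event dict assembled by `Transaction.finish` above;
# every value below is made up, and the timestamps are really datetimes.
example_transaction_event = {
    "type": "transaction",
    "transaction": "GET /checkout",
    "transaction_info": {"source": "route"},
    "contexts": {"trace": {"trace_id": "...", "span_id": "...", "op": "http.server"}},
    "tags": {},
    "timestamp": "2024-10-01T12:00:01Z",        # end of the transaction
    "start_timestamp": "2024-10-01T12:00:00Z",  # set when it was started
    "spans": [],  # to_json() of every finished child span
    "measurements": {"users.synced": {"value": 42, "unit": "none"}},
}
# --- end note ---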
if self._local_aggregator is not None: metrics_summary = self._local_aggregator.to_json() if metrics_summary: event["_metrics_summary"] = metrics_summary return scope.capture_event(event) def set_measurement(self, name, value, unit=""): # type: (str, float, MeasurementUnit) -> None self._measurements[name] = {"value": value, "unit": unit} def set_context(self, key, value): # type: (str, Any) -> None """Sets a context. Transactions can have multiple contexts and they should follow the format described in the "Contexts Interface" documentation. :param key: The name of the context. :param value: The information about the context. """ self._contexts[key] = value def set_http_status(self, http_status): # type: (int) -> None """Sets the status of the Transaction according to the given HTTP status. :param http_status: The HTTP status code.""" super().set_http_status(http_status) self.set_context("response", {"status_code": http_status}) def to_json(self): # type: () -> Dict[str, Any] """Returns a JSON-compatible representation of the transaction.""" rv = super().to_json() rv["name"] = self.name rv["source"] = self.source rv["sampled"] = self.sampled return rv def get_trace_context(self): # type: () -> Any trace_context = super().get_trace_context() if self._data: trace_context["data"] = self._data return trace_context def get_baggage(self): # type: () -> Baggage """Returns the :py:class:`~sentry_sdk.tracing_utils.Baggage` associated with the Transaction. The first time a new baggage with Sentry items is made, it will be frozen.""" if not self._baggage or self._baggage.mutable: self._baggage = Baggage.populate_from_transaction(self) return self._baggage def _set_initial_sampling_decision(self, sampling_context): # type: (SamplingContext) -> None """ Sets the transaction's sampling decision, according to the following precedence rules: 1. If a sampling decision is passed to `start_transaction` (`start_transaction(name: "my transaction", sampled: True)`), that decision will be used, regardless of anything else 2. If `traces_sampler` is defined, its decision will be used. It can choose to keep or ignore any parent sampling decision, or use the sampling context data to make its own decision or to choose a sample rate for the transaction. 3. If `traces_sampler` is not defined, but there's a parent sampling decision, the parent sampling decision will be used. 4. If `traces_sampler` is not defined and there's no parent sampling decision, `traces_sample_rate` will be used. """ client = sentry_sdk.get_client() transaction_description = "{op}transaction <{name}>".format( op=("<" + self.op + "> " if self.op else ""), name=self.name ) # nothing to do if tracing is disabled if not has_tracing_enabled(client.options): self.sampled = False return # if the user has forced a sampling decision by passing a `sampled` # value when starting the transaction, go with that if self.sampled is not None: self.sample_rate = float(self.sampled) return # we would have bailed already if neither `traces_sampler` nor # `traces_sample_rate` were defined, so one of these should work; prefer # the hook if so sample_rate = ( client.options["traces_sampler"](sampling_context) if callable(client.options.get("traces_sampler")) else ( # default inheritance behavior sampling_context["parent_sampled"] if sampling_context["parent_sampled"] is not None else client.options["traces_sample_rate"] ) ) # Since this is coming from the user (or from a function provided by the # user), who knows what we might get. 
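# --- Editor's example (illustrative sketch, not part of the SDK source).
# A `traces_sampler` following the precedence rules documented above. The
# keys read from `sampling_context` ("parent_sampled", "transaction_context")
# are the ones this module populates; the route names are made up.
import sentry_sdk

def my_traces_sampler(sampling_context):
    # Inherit the parent's decision when one was propagated.
    if sampling_context["parent_sampled"] is not None:
        return sampling_context["parent_sampled"]
    # Otherwise pick a rate per transaction.
    name = sampling_context.get("transaction_context", {}).get("name", "")
    if name.startswith("GET /health"):
        return 0.0  # drop noisy health checks
    return 0.25     # keep a quarter of everything else

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    traces_sampler=my_traces_sampler,
)
# --- end example ---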
(The only valid values are # booleans or numbers between 0 and 1.) if not is_valid_sample_rate(sample_rate, source="Tracing"): logger.warning( "[Tracing] Discarding {transaction_description} because of invalid sample rate.".format( transaction_description=transaction_description, ) ) self.sampled = False return self.sample_rate = float(sample_rate) if client.monitor: self.sample_rate /= 2**client.monitor.downsample_factor # if the function returned 0 (or false), or if `traces_sample_rate` is # 0, it's a sign the transaction should be dropped if not self.sample_rate: logger.debug( "[Tracing] Discarding {transaction_description} because {reason}".format( transaction_description=transaction_description, reason=( "traces_sampler returned 0 or False" if callable(client.options.get("traces_sampler")) else "traces_sample_rate is set to 0" ), ) ) self.sampled = False return # Now we roll the dice. random.random is inclusive of 0, but not of 1, # so strict < is safe here. In case sample_rate is a boolean, cast it # to a float (True becomes 1.0 and False becomes 0.0) self.sampled = random.random() < self.sample_rate if self.sampled: logger.debug( "[Tracing] Starting {transaction_description}".format( transaction_description=transaction_description, ) ) else: logger.debug( "[Tracing] Discarding {transaction_description} because it's not included in the random sample (sampling rate = {sample_rate})".format( transaction_description=transaction_description, sample_rate=self.sample_rate, ) ) class NoOpSpan(Span): def __repr__(self): # type: () -> str return "<%s>" % self.__class__.__name__ @property def containing_transaction(self): # type: () -> Optional[Transaction] return None def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): # type: (str, **Any) -> NoOpSpan return NoOpSpan() def to_traceparent(self): # type: () -> str return "" def to_baggage(self): # type: () -> Optional[Baggage] return None def get_baggage(self): # type: () -> Optional[Baggage] return None def iter_headers(self): # type: () -> Iterator[Tuple[str, str]] return iter(()) def set_tag(self, key, value): # type: (str, Any) -> None pass def set_data(self, key, value): # type: (str, Any) -> None pass def set_status(self, value): # type: (str) -> None pass def set_http_status(self, http_status): # type: (int) -> None pass def is_success(self): # type: () -> bool return True def to_json(self): # type: () -> Dict[str, Any] return {} def get_trace_context(self): # type: () -> Any return {} def get_profile_context(self): # type: () -> Any return {} def finish( self, scope=None, # type: Optional[sentry_sdk.Scope] end_timestamp=None, # type: Optional[Union[float, datetime]] *, hub=None, # type: Optional[sentry_sdk.Hub] ): # type: (...) -> Optional[str] """ The `hub` parameter is deprecated. Please use the `scope` parameter, instead. 
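# --- Editor's example (illustrative sketch, not part of the SDK source).
# The sampling arithmetic above reduced to its core: a backpressure
# downsample_factor of n halves the configured rate n times before the dice
# roll. The numbers are made up.
import random

sample_rate = 0.5
downsample_factor = 2  # hypothetical backpressure level
effective_rate = sample_rate / 2**downsample_factor  # 0.125
# random.random() is in [0.0, 1.0), so the strict "<" keeps, in expectation,
# exactly `effective_rate` of all traces.
sampled = random.random() < effective_rate
# --- end example ---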
""" pass def set_measurement(self, name, value, unit=""): # type: (str, float, MeasurementUnit) -> None pass def set_context(self, key, value): # type: (str, Any) -> None pass def init_span_recorder(self, maxlen): # type: (int) -> None pass def _set_initial_sampling_decision(self, sampling_context): # type: (SamplingContext) -> None pass if TYPE_CHECKING: @overload def trace(func=None): # type: (None) -> Callable[[Callable[P, R]], Callable[P, R]] pass @overload def trace(func): # type: (Callable[P, R]) -> Callable[P, R] pass def trace(func=None): # type: (Optional[Callable[P, R]]) -> Union[Callable[P, R], Callable[[Callable[P, R]], Callable[P, R]]] """ Decorator to start a child span under the existing current transaction. If there is no current transaction, then nothing will be traced. .. code-block:: :caption: Usage import sentry_sdk @sentry_sdk.trace def my_function(): ... @sentry_sdk.trace async def my_async_function(): ... """ from sentry_sdk.tracing_utils import start_child_span_decorator # This patterns allows usage of both @sentry_traced and @sentry_traced(...) # See https://stackoverflow.com/questions/52126071/decorator-with-arguments-avoid-parenthesis-when-no-arguments/52126278 if func: return start_child_span_decorator(func) else: return start_child_span_decorator # Circular imports from sentry_sdk.tracing_utils import ( Baggage, EnvironHeaders, extract_sentrytrace_data, has_tracing_enabled, maybe_create_breadcrumbs_from_span, ) with warnings.catch_warnings(): # The code in this file which uses `LocalAggregator` is only called from the deprecated `metrics` module. warnings.simplefilter("ignore", DeprecationWarning) from sentry_sdk.metrics import LocalAggregator sentry-python-2.18.0/sentry_sdk/tracing_utils.py000066400000000000000000000526531471214654000220720ustar00rootroot00000000000000import contextlib import inspect import os import re import sys from collections.abc import Mapping from datetime import timedelta from functools import wraps from urllib.parse import quote, unquote import uuid import sentry_sdk from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.utils import ( capture_internal_exceptions, filename_for_module, Dsn, logger, match_regex_list, qualname_from_function, to_string, is_sentry_url, _is_external_source, _is_in_project_root, _module_in_list, ) from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Dict from typing import Generator from typing import Optional from typing import Union from types import FrameType SENTRY_TRACE_REGEX = re.compile( "^[ \t]*" # whitespace "([0-9a-f]{32})?" # trace_id "-?([0-9a-f]{16})?" # span_id "-?([01])?" # sampled "[ \t]*$" # whitespace ) # This is a normal base64 regex, modified to reflect that fact that we strip the # trailing = or == off base64_stripped = ( # any of the characters in the base64 "alphabet", in multiples of 4 "([a-zA-Z0-9+/]{4})*" # either nothing or 2 or 3 base64-alphabet characters (see # https://en.wikipedia.org/wiki/Base64#Decoding_Base64_without_padding for # why there's never only 1 extra character) "([a-zA-Z0-9+/]{2,3})?" ) class EnvironHeaders(Mapping): # type: ignore def __init__( self, environ, # type: Mapping[str, str] prefix="HTTP_", # type: str ): # type: (...) 
-> None self.environ = environ self.prefix = prefix def __getitem__(self, key): # type: (str) -> Optional[Any] return self.environ[self.prefix + key.replace("-", "_").upper()] def __len__(self): # type: () -> int return sum(1 for _ in iter(self)) def __iter__(self): # type: () -> Generator[str, None, None] for k in self.environ: if not isinstance(k, str): continue k = k.replace("-", "_").upper() if not k.startswith(self.prefix): continue yield k[len(self.prefix) :] def has_tracing_enabled(options): # type: (Optional[Dict[str, Any]]) -> bool """ Returns True if either traces_sample_rate or traces_sampler is defined and enable_tracing is set and not false. """ if options is None: return False return bool( options.get("enable_tracing") is not False and ( options.get("traces_sample_rate") is not None or options.get("traces_sampler") is not None ) ) @contextlib.contextmanager def record_sql_queries( cursor, # type: Any query, # type: Any params_list, # type: Any paramstyle, # type: Optional[str] executemany, # type: bool record_cursor_repr=False, # type: bool span_origin="manual", # type: str ): # type: (...) -> Generator[sentry_sdk.tracing.Span, None, None] # TODO: Bring back capturing of params by default if sentry_sdk.get_client().options["_experiments"].get("record_sql_params", False): if not params_list or params_list == [None]: params_list = None if paramstyle == "pyformat": paramstyle = "format" else: params_list = None paramstyle = None query = _format_sql(cursor, query) data = {} if params_list is not None: data["db.params"] = params_list if paramstyle is not None: data["db.paramstyle"] = paramstyle if executemany: data["db.executemany"] = True if record_cursor_repr and cursor is not None: data["db.cursor"] = cursor with capture_internal_exceptions(): sentry_sdk.add_breadcrumb(message=query, category="query", data=data) with sentry_sdk.start_span( op=OP.DB, name=query, origin=span_origin, ) as span: for k, v in data.items(): span.set_data(k, v) yield span def maybe_create_breadcrumbs_from_span(scope, span): # type: (sentry_sdk.Scope, sentry_sdk.tracing.Span) -> None if span.op == OP.DB_REDIS: scope.add_breadcrumb( message=span.description, type="redis", category="redis", data=span._tags ) elif span.op == OP.HTTP_CLIENT: scope.add_breadcrumb(type="http", category="httplib", data=span._data) elif span.op == "subprocess": scope.add_breadcrumb( type="subprocess", category="subprocess", message=span.description, data=span._data, ) def _get_frame_module_abs_path(frame): # type: (FrameType) -> Optional[str] try: return frame.f_code.co_filename except Exception: return None def _should_be_included( is_sentry_sdk_frame, # type: bool namespace, # type: Optional[str] in_app_include, # type: Optional[list[str]] in_app_exclude, # type: Optional[list[str]] abs_path, # type: Optional[str] project_root, # type: Optional[str] ): # type: (...) 
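# --- Editor's example (illustrative sketch, not part of the SDK source).
# Driving the `record_sql_queries` context manager above from a hypothetical
# DB integration; `cursor` is assumed to be a DB-API 2.0 cursor.
from sentry_sdk.tracing_utils import record_sql_queries

def instrumented_execute(cursor, query, params):
    with record_sql_queries(
        cursor,
        query,
        params_list=[params],
        paramstyle="format",
        executemany=False,
        span_origin="manual",
    ) as span:
        cursor.execute(query, params)
        span.set_data("db.rows_affected", cursor.rowcount)
# --- end example ---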
-> bool # in_app_include takes precedence over in_app_exclude should_be_included = _module_in_list(namespace, in_app_include) should_be_excluded = _is_external_source(abs_path) or _module_in_list( namespace, in_app_exclude ) return not is_sentry_sdk_frame and ( should_be_included or (_is_in_project_root(abs_path, project_root) and not should_be_excluded) ) def add_query_source(span): # type: (sentry_sdk.tracing.Span) -> None """ Adds OTel compatible source code information to the span """ client = sentry_sdk.get_client() if not client.is_active(): return if span.timestamp is None or span.start_timestamp is None: return should_add_query_source = client.options.get("enable_db_query_source", True) if not should_add_query_source: return duration = span.timestamp - span.start_timestamp threshold = client.options.get("db_query_source_threshold_ms", 0) slow_query = duration / timedelta(milliseconds=1) > threshold if not slow_query: return project_root = client.options["project_root"] in_app_include = client.options.get("in_app_include") in_app_exclude = client.options.get("in_app_exclude") # Find the correct frame frame = sys._getframe() # type: Union[FrameType, None] while frame is not None: abs_path = _get_frame_module_abs_path(frame) try: namespace = frame.f_globals.get("__name__") # type: Optional[str] except Exception: namespace = None is_sentry_sdk_frame = namespace is not None and namespace.startswith( "sentry_sdk." ) should_be_included = _should_be_included( is_sentry_sdk_frame=is_sentry_sdk_frame, namespace=namespace, in_app_include=in_app_include, in_app_exclude=in_app_exclude, abs_path=abs_path, project_root=project_root, ) if should_be_included: break frame = frame.f_back else: frame = None # Set the data if frame is not None: try: lineno = frame.f_lineno except Exception: lineno = None if lineno is not None: span.set_data(SPANDATA.CODE_LINENO, frame.f_lineno) try: namespace = frame.f_globals.get("__name__") except Exception: namespace = None if namespace is not None: span.set_data(SPANDATA.CODE_NAMESPACE, namespace) filepath = _get_frame_module_abs_path(frame) if filepath is not None: if namespace is not None: in_app_path = filename_for_module(namespace, filepath) elif project_root is not None and filepath.startswith(project_root): in_app_path = filepath.replace(project_root, "").lstrip(os.sep) else: in_app_path = filepath span.set_data(SPANDATA.CODE_FILEPATH, in_app_path) try: code_function = frame.f_code.co_name except Exception: code_function = None if code_function is not None: span.set_data(SPANDATA.CODE_FUNCTION, frame.f_code.co_name) def extract_sentrytrace_data(header): # type: (Optional[str]) -> Optional[Dict[str, Union[str, bool, None]]] """ Given a `sentry-trace` header string, return a dictionary of data. """ if not header: return None if header.startswith("00-") and header.endswith("-00"): header = header[3:-3] match = SENTRY_TRACE_REGEX.match(header) if not match: return None trace_id, parent_span_id, sampled_str = match.groups() parent_sampled = None if trace_id: trace_id = "{:032x}".format(int(trace_id, 16)) if parent_span_id: parent_span_id = "{:016x}".format(int(parent_span_id, 16)) if sampled_str: parent_sampled = sampled_str != "0" return { "trace_id": trace_id, "parent_span_id": parent_span_id, "parent_sampled": parent_sampled, } def _format_sql(cursor, sql): # type: (Any, str) -> Optional[str] real_sql = None # If we're using psycopg2, it could be that we're # looking at a query that uses Composed objects. Use psycopg2's mogrify # function to format the query. 
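# --- Editor's example (illustrative sketch, not part of the SDK source).
# What `extract_sentrytrace_data` above returns for a typical header; the
# hex values are made up but well-formed.
from sentry_sdk.tracing_utils import extract_sentrytrace_data

data = extract_sentrytrace_data(
    "771a43a4192642f0b136d5159a501700-1234567890abcdef-1"
)
assert data == {
    "trace_id": "771a43a4192642f0b136d5159a501700",
    "parent_span_id": "1234567890abcdef",
    "parent_sampled": True,
}
# --- end example ---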
We lose per-parameter trimming but gain # accuracy in formatting. try: if hasattr(cursor, "mogrify"): real_sql = cursor.mogrify(sql) if isinstance(real_sql, bytes): real_sql = real_sql.decode(cursor.connection.encoding) except Exception: real_sql = None return real_sql or to_string(sql) class PropagationContext: """ The PropagationContext represents the data of a trace in Sentry. """ __slots__ = ( "_trace_id", "_span_id", "parent_span_id", "parent_sampled", "dynamic_sampling_context", ) def __init__( self, trace_id=None, # type: Optional[str] span_id=None, # type: Optional[str] parent_span_id=None, # type: Optional[str] parent_sampled=None, # type: Optional[bool] dynamic_sampling_context=None, # type: Optional[Dict[str, str]] ): # type: (...) -> None self._trace_id = trace_id """The trace id of the Sentry trace.""" self._span_id = span_id """The span id of the currently executing span.""" self.parent_span_id = parent_span_id """The id of the parent span that started this span. The parent span could also be a span in an upstream service.""" self.parent_sampled = parent_sampled """Boolean indicator if the parent span was sampled. Important when the parent span originated in an upstream service, because we watn to sample the whole trace, or nothing from the trace.""" self.dynamic_sampling_context = dynamic_sampling_context """Data that is used for dynamic sampling decisions.""" @classmethod def from_incoming_data(cls, incoming_data): # type: (Dict[str, Any]) -> Optional[PropagationContext] propagation_context = None normalized_data = normalize_incoming_data(incoming_data) baggage_header = normalized_data.get(BAGGAGE_HEADER_NAME) if baggage_header: propagation_context = PropagationContext() propagation_context.dynamic_sampling_context = Baggage.from_incoming_header( baggage_header ).dynamic_sampling_context() sentry_trace_header = normalized_data.get(SENTRY_TRACE_HEADER_NAME) if sentry_trace_header: sentrytrace_data = extract_sentrytrace_data(sentry_trace_header) if sentrytrace_data is not None: if propagation_context is None: propagation_context = PropagationContext() propagation_context.update(sentrytrace_data) return propagation_context @property def trace_id(self): # type: () -> str """The trace id of the Sentry trace.""" if not self._trace_id: self._trace_id = uuid.uuid4().hex return self._trace_id @trace_id.setter def trace_id(self, value): # type: (str) -> None self._trace_id = value @property def span_id(self): # type: () -> str """The span id of the currently executed span.""" if not self._span_id: self._span_id = uuid.uuid4().hex[16:] return self._span_id @span_id.setter def span_id(self, value): # type: (str) -> None self._span_id = value def update(self, other_dict): # type: (Dict[str, Any]) -> None """ Updates the PropagationContext with data from the given dictionary. """ for key, value in other_dict.items(): try: setattr(self, key, value) except AttributeError: pass def __repr__(self): # type: (...) -> str return "".format( self._trace_id, self._span_id, self.parent_span_id, self.parent_sampled, self.dynamic_sampling_context, ) class Baggage: """ The W3C Baggage header information (see https://www.w3.org/TR/baggage/). 
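# --- Editor's example (illustrative sketch, not part of the SDK source).
# Building a `PropagationContext` from incoming headers via
# `from_incoming_data` above; header values are made up but well-formed.
from sentry_sdk.tracing_utils import PropagationContext

incoming = {
    "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1",
    "baggage": "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
    "sentry-sample_rate=0.25",
}
ctx = PropagationContext.from_incoming_data(incoming)
assert ctx is not None
assert ctx.trace_id == "771a43a4192642f0b136d5159a501700"
assert ctx.parent_sampled is True
assert ctx.dynamic_sampling_context["sample_rate"] == "0.25"
# --- end example ---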
""" __slots__ = ("sentry_items", "third_party_items", "mutable") SENTRY_PREFIX = "sentry-" SENTRY_PREFIX_REGEX = re.compile("^sentry-") def __init__( self, sentry_items, # type: Dict[str, str] third_party_items="", # type: str mutable=True, # type: bool ): self.sentry_items = sentry_items self.third_party_items = third_party_items self.mutable = mutable @classmethod def from_incoming_header(cls, header): # type: (Optional[str]) -> Baggage """ freeze if incoming header already has sentry baggage """ sentry_items = {} third_party_items = "" mutable = True if header: for item in header.split(","): if "=" not in item: continue with capture_internal_exceptions(): item = item.strip() key, val = item.split("=") if Baggage.SENTRY_PREFIX_REGEX.match(key): baggage_key = unquote(key.split("-")[1]) sentry_items[baggage_key] = unquote(val) mutable = False else: third_party_items += ("," if third_party_items else "") + item return Baggage(sentry_items, third_party_items, mutable) @classmethod def from_options(cls, scope): # type: (sentry_sdk.scope.Scope) -> Optional[Baggage] sentry_items = {} # type: Dict[str, str] third_party_items = "" mutable = False client = sentry_sdk.get_client() if not client.is_active() or scope._propagation_context is None: return Baggage(sentry_items) options = client.options propagation_context = scope._propagation_context if propagation_context is not None: sentry_items["trace_id"] = propagation_context.trace_id if options.get("environment"): sentry_items["environment"] = options["environment"] if options.get("release"): sentry_items["release"] = options["release"] if options.get("dsn"): sentry_items["public_key"] = Dsn(options["dsn"]).public_key if options.get("traces_sample_rate"): sentry_items["sample_rate"] = str(options["traces_sample_rate"]) return Baggage(sentry_items, third_party_items, mutable) @classmethod def populate_from_transaction(cls, transaction): # type: (sentry_sdk.tracing.Transaction) -> Baggage """ Populate fresh baggage entry with sentry_items and make it immutable if this is the head SDK which originates traces. """ client = sentry_sdk.get_client() sentry_items = {} # type: Dict[str, str] if not client.is_active(): return Baggage(sentry_items) options = client.options or {} sentry_items["trace_id"] = transaction.trace_id if options.get("environment"): sentry_items["environment"] = options["environment"] if options.get("release"): sentry_items["release"] = options["release"] if options.get("dsn"): sentry_items["public_key"] = Dsn(options["dsn"]).public_key if ( transaction.name and transaction.source not in LOW_QUALITY_TRANSACTION_SOURCES ): sentry_items["transaction"] = transaction.name if transaction.sample_rate is not None: sentry_items["sample_rate"] = str(transaction.sample_rate) if transaction.sampled is not None: sentry_items["sampled"] = "true" if transaction.sampled else "false" # there's an existing baggage but it was mutable, # which is why we are creating this new baggage. # However, if by chance the user put some sentry items in there, give them precedence. 
if transaction._baggage and transaction._baggage.sentry_items: sentry_items.update(transaction._baggage.sentry_items) return Baggage(sentry_items, mutable=False) def freeze(self): # type: () -> None self.mutable = False def dynamic_sampling_context(self): # type: () -> Dict[str, str] header = {} for key, item in self.sentry_items.items(): header[key] = item return header def serialize(self, include_third_party=False): # type: (bool) -> str items = [] for key, val in self.sentry_items.items(): with capture_internal_exceptions(): item = Baggage.SENTRY_PREFIX + quote(key) + "=" + quote(str(val)) items.append(item) if include_third_party: items.append(self.third_party_items) return ",".join(items) def should_propagate_trace(client, url): # type: (sentry_sdk.client.BaseClient, str) -> bool """ Returns True if url matches trace_propagation_targets configured in the given client. Otherwise, returns False. """ trace_propagation_targets = client.options["trace_propagation_targets"] if is_sentry_url(client, url): return False return match_regex_list(url, trace_propagation_targets, substring_matching=True) def normalize_incoming_data(incoming_data): # type: (Dict[str, Any]) -> Dict[str, Any] """ Normalizes incoming data so the keys are all lowercase with dashes instead of underscores and stripped from known prefixes. """ data = {} for key, value in incoming_data.items(): if key.startswith("HTTP_"): key = key[5:] key = key.replace("_", "-").lower() data[key] = value return data def start_child_span_decorator(func): # type: (Any) -> Any """ Decorator to add child spans for functions. See also ``sentry_sdk.tracing.trace()``. """ # Asynchronous case if inspect.iscoroutinefunction(func): @wraps(func) async def func_with_tracing(*args, **kwargs): # type: (*Any, **Any) -> Any span = get_current_span() if span is None: logger.debug( "Cannot create a child span for %s. " "Please start a Sentry transaction before calling this function.", qualname_from_function(func), ) return await func(*args, **kwargs) with span.start_child( op=OP.FUNCTION, name=qualname_from_function(func), ): return await func(*args, **kwargs) try: func_with_tracing.__signature__ = inspect.signature(func) # type: ignore[attr-defined] except Exception: pass # Synchronous case else: @wraps(func) def func_with_tracing(*args, **kwargs): # type: (*Any, **Any) -> Any span = get_current_span() if span is None: logger.debug( "Cannot create a child span for %s. 
" "Please start a Sentry transaction before calling this function.", qualname_from_function(func), ) return func(*args, **kwargs) with span.start_child( op=OP.FUNCTION, name=qualname_from_function(func), ): return func(*args, **kwargs) try: func_with_tracing.__signature__ = inspect.signature(func) # type: ignore[attr-defined] except Exception: pass return func_with_tracing def get_current_span(scope=None): # type: (Optional[sentry_sdk.Scope]) -> Optional[Span] """ Returns the currently active span if there is one running, otherwise `None` """ scope = scope or sentry_sdk.get_current_scope() current_span = scope.span return current_span # Circular imports from sentry_sdk.tracing import ( BAGGAGE_HEADER_NAME, LOW_QUALITY_TRANSACTION_SOURCES, SENTRY_TRACE_HEADER_NAME, ) if TYPE_CHECKING: from sentry_sdk.tracing import Span sentry-python-2.18.0/sentry_sdk/transport.py000066400000000000000000000766701471214654000212640ustar00rootroot00000000000000from abc import ABC, abstractmethod import io import os import gzip import socket import ssl import time import warnings from datetime import datetime, timedelta, timezone from collections import defaultdict from urllib.request import getproxies try: import brotli # type: ignore except ImportError: brotli = None import urllib3 import certifi import sentry_sdk from sentry_sdk.consts import EndpointType from sentry_sdk.utils import Dsn, logger, capture_internal_exceptions from sentry_sdk.worker import BackgroundWorker from sentry_sdk.envelope import Envelope, Item, PayloadRef from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any from typing import Callable from typing import Dict from typing import DefaultDict from typing import Iterable from typing import List from typing import Mapping from typing import Optional from typing import Self from typing import Tuple from typing import Type from typing import Union from urllib3.poolmanager import PoolManager from urllib3.poolmanager import ProxyManager from sentry_sdk._types import Event, EventDataCategory KEEP_ALIVE_SOCKET_OPTIONS = [] for option in [ (socket.SOL_SOCKET, lambda: getattr(socket, "SO_KEEPALIVE"), 1), # noqa: B009 (socket.SOL_TCP, lambda: getattr(socket, "TCP_KEEPIDLE"), 45), # noqa: B009 (socket.SOL_TCP, lambda: getattr(socket, "TCP_KEEPINTVL"), 10), # noqa: B009 (socket.SOL_TCP, lambda: getattr(socket, "TCP_KEEPCNT"), 6), # noqa: B009 ]: try: KEEP_ALIVE_SOCKET_OPTIONS.append((option[0], option[1](), option[2])) except AttributeError: # a specific option might not be available on specific systems, # e.g. TCP_KEEPIDLE doesn't exist on macOS pass class Transport(ABC): """Baseclass for all transports. A transport is used to send an event to sentry. """ parsed_dsn = None # type: Optional[Dsn] def __init__(self, options=None): # type: (Self, Optional[Dict[str, Any]]) -> None self.options = options if options and options["dsn"] is not None and options["dsn"]: self.parsed_dsn = Dsn(options["dsn"]) else: self.parsed_dsn = None def capture_event(self, event): # type: (Self, Event) -> None """ DEPRECATED: Please use capture_envelope instead. This gets invoked with the event dictionary when an event should be sent to sentry. """ warnings.warn( "capture_event is deprecated, please use capture_envelope instead!", DeprecationWarning, stacklevel=2, ) envelope = Envelope() envelope.add_event(event) self.capture_envelope(envelope) @abstractmethod def capture_envelope(self, envelope): # type: (Self, Envelope) -> None """ Send an envelope to Sentry. 
Envelopes are a data container format that can hold any type of data submitted to Sentry. We use it to send all event data (including errors, transactions, crons check-ins, etc.) to Sentry. """ pass def flush( self, timeout, callback=None, ): # type: (Self, float, Optional[Any]) -> None """ Wait `timeout` seconds for the current events to be sent out. The default implementation is a no-op, since this method may only be relevant to some transports. Subclasses should override this method if necessary. """ return None def kill(self): # type: (Self) -> None """ Forcefully kills the transport. The default implementation is a no-op, since this method may only be relevant to some transports. Subclasses should override this method if necessary. """ return None def record_lost_event( self, reason, # type: str data_category=None, # type: Optional[EventDataCategory] item=None, # type: Optional[Item] *, quantity=1, # type: int ): # type: (...) -> None """This increments a counter for event loss by reason and data category by the given positive-int quantity (default 1). If an item is provided, the data category and quantity are extracted from the item, and the values passed for data_category and quantity are ignored. When recording a lost transaction via data_category="transaction", the calling code should also record the lost spans via this method. When recording lost spans, `quantity` should be set to the number of contained spans, plus one for the transaction itself. When passing an Item containing a transaction via the `item` parameter, this method automatically records the lost spans. """ return None def is_healthy(self): # type: (Self) -> bool return True def __del__(self): # type: (Self) -> None try: self.kill() except Exception: pass def _parse_rate_limits(header, now=None): # type: (str, Optional[datetime]) -> Iterable[Tuple[Optional[EventDataCategory], datetime]] if now is None: now = datetime.now(timezone.utc) for limit in header.split(","): try: parameters = limit.strip().split(":") retry_after_val, categories = parameters[:2] retry_after = now + timedelta(seconds=int(retry_after_val)) for category in categories and categories.split(";") or (None,): if category == "metric_bucket": try: namespaces = parameters[4].split(";") except IndexError: namespaces = [] if not namespaces or "custom" in namespaces: yield category, retry_after # type: ignore else: yield category, retry_after # type: ignore except (LookupError, ValueError): continue class BaseHttpTransport(Transport): """The base HTTP transport.""" def __init__(self, options): # type: (Self, Dict[str, Any]) -> None from sentry_sdk.consts import VERSION Transport.__init__(self, options) assert self.parsed_dsn is not None self.options = options # type: Dict[str, Any] self._worker = BackgroundWorker(queue_size=options["transport_queue_size"]) self._auth = self.parsed_dsn.to_auth("sentry.python/%s" % VERSION) self._disabled_until = {} # type: Dict[Optional[EventDataCategory], datetime] # We only use this Retry() class for the `get_retry_after` method it exposes self._retry = urllib3.util.Retry() self._discarded_events = defaultdict( int ) # type: DefaultDict[Tuple[EventDataCategory, str], int] self._last_client_report_sent = time.time() self._pool = self._make_pool() # Backwards compatibility for deprecated `self.hub_class` attribute self._hub_cls = sentry_sdk.Hub experiments = options.get("_experiments", {}) compression_level = experiments.get( "transport_compression_level", experiments.get("transport_zlib_compression_level"), ) 
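# --- Editor's example (illustrative sketch, not part of the SDK source).
# How the private `_parse_rate_limits` helper above reads an
# `x-sentry-rate-limits` header; the header value is made up.
from datetime import datetime, timezone
from sentry_sdk.transport import _parse_rate_limits

now = datetime(2024, 10, 1, 12, 0, 0, tzinfo=timezone.utc)
header = "60:transaction:organization, 2700:default;error:organization"
limits = dict(_parse_rate_limits(header, now=now))
assert (limits["transaction"] - now).total_seconds() == 60
assert (limits["error"] - now).total_seconds() == 2700
# --- end example ---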
compression_algo = experiments.get( "transport_compression_algo", ( "gzip" # if only compression level is set, assume gzip for backwards compatibility # if we don't have brotli available, fallback to gzip if compression_level is not None or brotli is None else "br" ), ) if compression_algo == "br" and brotli is None: logger.warning( "You asked for brotli compression without the Brotli module, falling back to gzip -9" ) compression_algo = "gzip" compression_level = None if compression_algo not in ("br", "gzip"): logger.warning( "Unknown compression algo %s, disabling compression", compression_algo ) self._compression_level = 0 self._compression_algo = None else: self._compression_algo = compression_algo if compression_level is not None: self._compression_level = compression_level elif self._compression_algo == "gzip": self._compression_level = 9 elif self._compression_algo == "br": self._compression_level = 4 def record_lost_event( self, reason, # type: str data_category=None, # type: Optional[EventDataCategory] item=None, # type: Optional[Item] *, quantity=1, # type: int ): # type: (...) -> None if not self.options["send_client_reports"]: return if item is not None: data_category = item.data_category quantity = 1 # If an item is provided, we always count it as 1 (except for attachments, handled below). if data_category == "transaction": # Also record the lost spans event = item.get_transaction_event() or {} # +1 for the transaction itself span_count = len(event.get("spans") or []) + 1 self.record_lost_event(reason, "span", quantity=span_count) elif data_category == "attachment": # quantity of 0 is actually 1 as we do not want to count # empty attachments as actually empty. quantity = len(item.get_bytes()) or 1 elif data_category is None: raise TypeError("data category not provided") self._discarded_events[data_category, reason] += quantity def _get_header_value(self, response, header): # type: (Self, Any, str) -> Optional[str] return response.headers.get(header) def _update_rate_limits(self, response): # type: (Self, Union[urllib3.BaseHTTPResponse, httpcore.Response]) -> None # new sentries with more rate limit insights. We honor this header # no matter of the status code to update our internal rate limits. header = self._get_header_value(response, "x-sentry-rate-limits") if header: logger.warning("Rate-limited via x-sentry-rate-limits") self._disabled_until.update(_parse_rate_limits(header)) # old sentries only communicate global rate limit hits via the # retry-after header on 429. This header can also be emitted on new # sentries if a proxy in front wants to globally slow things down. 
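# --- Editor's example (illustrative sketch, not part of the SDK source).
# Selecting the transport compression behaviour above via `_experiments`;
# "br" is only honoured when the `brotli` module is importable, otherwise
# the code falls back to gzip as shown.
import sentry_sdk

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    _experiments={
        "transport_compression_algo": "br",  # or "gzip"
        "transport_compression_level": 4,
    },
)
# --- end example ---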
elif response.status == 429: logger.warning("Rate-limited via 429") retry_after_value = self._get_header_value(response, "Retry-After") retry_after = ( self._retry.parse_retry_after(retry_after_value) if retry_after_value is not None else None ) or 60 self._disabled_until[None] = datetime.now(timezone.utc) + timedelta( seconds=retry_after ) def _send_request( self, body, headers, endpoint_type=EndpointType.ENVELOPE, envelope=None, ): # type: (Self, bytes, Dict[str, str], EndpointType, Optional[Envelope]) -> None def record_loss(reason): # type: (str) -> None if envelope is None: self.record_lost_event(reason, data_category="error") else: for item in envelope.items: self.record_lost_event(reason, item=item) headers.update( { "User-Agent": str(self._auth.client), "X-Sentry-Auth": str(self._auth.to_header()), } ) try: response = self._request( "POST", endpoint_type, body, headers, ) except Exception: self.on_dropped_event("network") record_loss("network_error") raise try: self._update_rate_limits(response) if response.status == 429: # if we hit a 429. Something was rate limited but we already # acted on this in `self._update_rate_limits`. Note that we # do not want to record event loss here as we will have recorded # an outcome in relay already. self.on_dropped_event("status_429") pass elif response.status >= 300 or response.status < 200: logger.error( "Unexpected status code: %s (body: %s)", response.status, getattr(response, "data", getattr(response, "content", None)), ) self.on_dropped_event("status_{}".format(response.status)) record_loss("network_error") finally: response.close() def on_dropped_event(self, _reason): # type: (Self, str) -> None return None def _fetch_pending_client_report(self, force=False, interval=60): # type: (Self, bool, int) -> Optional[Item] if not self.options["send_client_reports"]: return None if not (force or self._last_client_report_sent < time.time() - interval): return None discarded_events = self._discarded_events self._discarded_events = defaultdict(int) self._last_client_report_sent = time.time() if not discarded_events: return None return Item( PayloadRef( json={ "timestamp": time.time(), "discarded_events": [ {"reason": reason, "category": category, "quantity": quantity} for ( (category, reason), quantity, ) in discarded_events.items() ], } ), type="client_report", ) def _flush_client_reports(self, force=False): # type: (Self, bool) -> None client_report = self._fetch_pending_client_report(force=force, interval=60) if client_report is not None: self.capture_envelope(Envelope(items=[client_report])) def _check_disabled(self, category): # type: (str) -> bool def _disabled(bucket): # type: (Any) -> bool # The envelope item type used for metrics is statsd # whereas the rate limit category is metric_bucket if bucket == "statsd": bucket = "metric_bucket" ts = self._disabled_until.get(bucket) return ts is not None and ts > datetime.now(timezone.utc) return _disabled(category) or _disabled(None) def _is_rate_limited(self): # type: (Self) -> bool return any( ts > datetime.now(timezone.utc) for ts in self._disabled_until.values() ) def _is_worker_full(self): # type: (Self) -> bool return self._worker.full() def is_healthy(self): # type: (Self) -> bool return not (self._is_worker_full() or self._is_rate_limited()) def _send_envelope(self, envelope): # type: (Self, Envelope) -> None # remove all items from the envelope which are over quota new_items = [] for item in envelope.items: if self._check_disabled(item.data_category): if item.data_category in ("transaction", 
"error", "default", "statsd"): self.on_dropped_event("self_rate_limits") self.record_lost_event("ratelimit_backoff", item=item) else: new_items.append(item) # Since we're modifying the envelope here make a copy so that others # that hold references do not see their envelope modified. envelope = Envelope(headers=envelope.headers, items=new_items) if not envelope.items: return None # since we're already in the business of sending out an envelope here # check if we have one pending for the stats session envelopes so we # can attach it to this enveloped scheduled for sending. This will # currently typically attach the client report to the most recent # session update. client_report_item = self._fetch_pending_client_report(interval=30) if client_report_item is not None: envelope.items.append(client_report_item) content_encoding, body = self._serialize_envelope(envelope) assert self.parsed_dsn is not None logger.debug( "Sending envelope [%s] project:%s host:%s", envelope.description, self.parsed_dsn.project_id, self.parsed_dsn.host, ) headers = { "Content-Type": "application/x-sentry-envelope", } if content_encoding: headers["Content-Encoding"] = content_encoding self._send_request( body.getvalue(), headers=headers, endpoint_type=EndpointType.ENVELOPE, envelope=envelope, ) return None def _serialize_envelope(self, envelope): # type: (Self, Envelope) -> tuple[Optional[str], io.BytesIO] content_encoding = None body = io.BytesIO() if self._compression_level == 0 or self._compression_algo is None: envelope.serialize_into(body) else: content_encoding = self._compression_algo if self._compression_algo == "br" and brotli is not None: body.write( brotli.compress( envelope.serialize(), quality=self._compression_level ) ) else: # assume gzip as we sanitize the algo value in init with gzip.GzipFile( fileobj=body, mode="w", compresslevel=self._compression_level ) as f: envelope.serialize_into(f) return content_encoding, body def _get_pool_options(self): # type: (Self) -> Dict[str, Any] raise NotImplementedError() def _in_no_proxy(self, parsed_dsn): # type: (Self, Dsn) -> bool no_proxy = getproxies().get("no") if not no_proxy: return False for host in no_proxy.split(","): host = host.strip() if parsed_dsn.host.endswith(host) or parsed_dsn.netloc.endswith(host): return True return False def _make_pool(self): # type: (Self) -> Union[PoolManager, ProxyManager, httpcore.SOCKSProxy, httpcore.HTTPProxy, httpcore.ConnectionPool] raise NotImplementedError() def _request( self, method, endpoint_type, body, headers, ): # type: (Self, str, EndpointType, Any, Mapping[str, str]) -> Union[urllib3.BaseHTTPResponse, httpcore.Response] raise NotImplementedError() def capture_envelope( self, envelope # type: Envelope ): # type: (...) 
-> None def send_envelope_wrapper(): # type: () -> None with capture_internal_exceptions(): self._send_envelope(envelope) self._flush_client_reports() if not self._worker.submit(send_envelope_wrapper): self.on_dropped_event("full_queue") for item in envelope.items: self.record_lost_event("queue_overflow", item=item) def flush( self, timeout, callback=None, ): # type: (Self, float, Optional[Callable[[int, float], None]]) -> None logger.debug("Flushing HTTP transport") if timeout > 0: self._worker.submit(lambda: self._flush_client_reports(force=True)) self._worker.flush(timeout, callback) def kill(self): # type: (Self) -> None logger.debug("Killing HTTP transport") self._worker.kill() @staticmethod def _warn_hub_cls(): # type: () -> None """Convenience method to warn users about the deprecation of the `hub_cls` attribute.""" warnings.warn( "The `hub_cls` attribute is deprecated and will be removed in a future release.", DeprecationWarning, stacklevel=3, ) @property def hub_cls(self): # type: (Self) -> type[sentry_sdk.Hub] """DEPRECATED: This attribute is deprecated and will be removed in a future release.""" HttpTransport._warn_hub_cls() return self._hub_cls @hub_cls.setter def hub_cls(self, value): # type: (Self, type[sentry_sdk.Hub]) -> None """DEPRECATED: This attribute is deprecated and will be removed in a future release.""" HttpTransport._warn_hub_cls() self._hub_cls = value class HttpTransport(BaseHttpTransport): if TYPE_CHECKING: _pool: Union[PoolManager, ProxyManager] def _get_pool_options(self): # type: (Self) -> Dict[str, Any] num_pools = self.options.get("_experiments", {}).get("transport_num_pools") options = { "num_pools": 2 if num_pools is None else int(num_pools), "cert_reqs": "CERT_REQUIRED", } socket_options = None # type: Optional[List[Tuple[int, int, int | bytes]]] if self.options["socket_options"] is not None: socket_options = self.options["socket_options"] if self.options["keep_alive"]: if socket_options is None: socket_options = [] used_options = {(o[0], o[1]) for o in socket_options} for default_option in KEEP_ALIVE_SOCKET_OPTIONS: if (default_option[0], default_option[1]) not in used_options: socket_options.append(default_option) if socket_options is not None: options["socket_options"] = socket_options options["ca_certs"] = ( self.options["ca_certs"] # User-provided bundle from the SDK init or os.environ.get("SSL_CERT_FILE") or os.environ.get("REQUESTS_CA_BUNDLE") or certifi.where() ) options["cert_file"] = self.options["cert_file"] or os.environ.get( "CLIENT_CERT_FILE" ) options["key_file"] = self.options["key_file"] or os.environ.get( "CLIENT_KEY_FILE" ) return options def _make_pool(self): # type: (Self) -> Union[PoolManager, ProxyManager] if self.parsed_dsn is None: raise ValueError("Cannot create HTTP-based transport without valid DSN") proxy = None no_proxy = self._in_no_proxy(self.parsed_dsn) # try HTTPS first https_proxy = self.options["https_proxy"] if self.parsed_dsn.scheme == "https" and (https_proxy != ""): proxy = https_proxy or (not no_proxy and getproxies().get("https")) # maybe fallback to HTTP proxy http_proxy = self.options["http_proxy"] if not proxy and (http_proxy != ""): proxy = http_proxy or (not no_proxy and getproxies().get("http")) opts = self._get_pool_options() if proxy: proxy_headers = self.options["proxy_headers"] if proxy_headers: opts["proxy_headers"] = proxy_headers if proxy.startswith("socks"): use_socks_proxy = True try: # Check if PySocks dependency is available from urllib3.contrib.socks import SOCKSProxyManager except ImportError: 
                    use_socks_proxy = False
                    logger.warning(
                        "You have configured a SOCKS proxy (%s) but support for SOCKS proxies is not installed. Disabling proxy support. Please add `PySocks` (or `urllib3` with the `[socks]` extra) to your dependencies.",
                        proxy,
                    )

                if use_socks_proxy:
                    return SOCKSProxyManager(proxy, **opts)
                else:
                    return urllib3.PoolManager(**opts)
            else:
                return urllib3.ProxyManager(proxy, **opts)
        else:
            return urllib3.PoolManager(**opts)

    def _request(
        self,
        method,
        endpoint_type,
        body,
        headers,
    ):
        # type: (Self, str, EndpointType, Any, Mapping[str, str]) -> urllib3.BaseHTTPResponse
        return self._pool.request(
            method,
            self._auth.get_api_url(endpoint_type),
            body=body,
            headers=headers,
        )


try:
    import httpcore
    import h2  # type: ignore # noqa: F401
except ImportError:
    # Sorry, no Http2Transport for you
    class Http2Transport(HttpTransport):
        def __init__(self, options):
            # type: (Self, Dict[str, Any]) -> None
            super().__init__(options)
            logger.warning(
                "You tried to use HTTP2Transport but don't have httpcore[http2] installed. Falling back to HTTPTransport."
            )

else:

    class Http2Transport(BaseHttpTransport):  # type: ignore
        """The HTTP2 transport based on httpcore."""

        if TYPE_CHECKING:
            _pool: Union[
                httpcore.SOCKSProxy, httpcore.HTTPProxy, httpcore.ConnectionPool
            ]

        def _get_header_value(self, response, header):
            # type: (Self, httpcore.Response, str) -> Optional[str]
            return next(
                (
                    val.decode("ascii")
                    for key, val in response.headers
                    if key.decode("ascii").lower() == header
                ),
                None,
            )

        def _request(
            self,
            method,
            endpoint_type,
            body,
            headers,
        ):
            # type: (Self, str, EndpointType, Any, Mapping[str, str]) -> httpcore.Response
            response = self._pool.request(
                method,
                self._auth.get_api_url(endpoint_type),
                content=body,
                headers=headers,  # type: ignore
            )
            return response

        def _get_pool_options(self):
            # type: (Self) -> Dict[str, Any]
            options = {
                "http2": self.parsed_dsn is not None
                and self.parsed_dsn.scheme == "https",
                "retries": 3,
            }  # type: Dict[str, Any]

            socket_options = (
                self.options["socket_options"]
                if self.options["socket_options"] is not None
                else []
            )

            used_options = {(o[0], o[1]) for o in socket_options}
            for default_option in KEEP_ALIVE_SOCKET_OPTIONS:
                if (default_option[0], default_option[1]) not in used_options:
                    socket_options.append(default_option)

            options["socket_options"] = socket_options

            ssl_context = ssl.create_default_context()
            ssl_context.load_verify_locations(
                self.options["ca_certs"]  # User-provided bundle from the SDK init
                or os.environ.get("SSL_CERT_FILE")
                or os.environ.get("REQUESTS_CA_BUNDLE")
                or certifi.where()
            )
            cert_file = self.options["cert_file"] or os.environ.get("CLIENT_CERT_FILE")
            key_file = self.options["key_file"] or os.environ.get("CLIENT_KEY_FILE")
            if cert_file is not None:
                ssl_context.load_cert_chain(cert_file, key_file)

            options["ssl_context"] = ssl_context

            return options

        def _make_pool(self):
            # type: (Self) -> Union[httpcore.SOCKSProxy, httpcore.HTTPProxy, httpcore.ConnectionPool]
            if self.parsed_dsn is None:
                raise ValueError("Cannot create HTTP-based transport without valid DSN")
            proxy = None
            no_proxy = self._in_no_proxy(self.parsed_dsn)

            # try HTTPS first
            https_proxy = self.options["https_proxy"]
            if self.parsed_dsn.scheme == "https" and (https_proxy != ""):
                proxy = https_proxy or (not no_proxy and getproxies().get("https"))

            # maybe fallback to HTTP proxy
            http_proxy = self.options["http_proxy"]
            if not proxy and (http_proxy != ""):
                proxy = http_proxy or (not no_proxy and getproxies().get("http"))

            opts = self._get_pool_options()

            if proxy:
                proxy_headers = self.options["proxy_headers"]
                if proxy_headers:
                    opts["proxy_headers"] = proxy_headers

                if proxy.startswith("socks"):
                    try:
                        if "socket_options" in opts:
                            socket_options = opts.pop("socket_options")
                            if socket_options:
                                logger.warning(
                                    "You have defined socket_options but using a SOCKS proxy which doesn't support these. We'll ignore socket_options."
                                )
                        return httpcore.SOCKSProxy(proxy_url=proxy, **opts)
                    except RuntimeError:
                        logger.warning(
                            "You have configured a SOCKS proxy (%s) but support for SOCKS proxies is not installed. Disabling proxy support.",
                            proxy,
                        )
                else:
                    return httpcore.HTTPProxy(proxy_url=proxy, **opts)

            return httpcore.ConnectionPool(**opts)


class _FunctionTransport(Transport):
    """
    DEPRECATED: Users wishing to provide a custom transport should subclass
    the Transport class, rather than providing a function.
    """

    def __init__(
        self, func  # type: Callable[[Event], None]
    ):
        # type: (...) -> None
        Transport.__init__(self)
        self._func = func

    def capture_event(
        self, event  # type: Event
    ):
        # type: (...) -> None
        self._func(event)
        return None

    def capture_envelope(self, envelope: Envelope) -> None:
        # Since function transports expect to be called with an event, we need
        # to iterate over the envelope and call the function for each event, via
        # the deprecated capture_event method.
        event = envelope.get_event()
        if event is not None:
            self.capture_event(event)


def make_transport(options):
    # type: (Dict[str, Any]) -> Optional[Transport]
    ref_transport = options["transport"]

    use_http2_transport = options.get("_experiments", {}).get("transport_http2", False)

    # By default, we use the http transport class
    transport_cls = (
        Http2Transport if use_http2_transport else HttpTransport
    )  # type: Type[Transport]

    if isinstance(ref_transport, Transport):
        return ref_transport
    elif isinstance(ref_transport, type) and issubclass(ref_transport, Transport):
        transport_cls = ref_transport
    elif callable(ref_transport):
        warnings.warn(
            "Function transports are deprecated and will be removed in a future release. "
            "Please provide a Transport instance or subclass, instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        return _FunctionTransport(ref_transport)

    # if a transport class is given only instantiate it if the dsn is not
    # empty or None
    if options["dsn"]:
        return transport_cls(options)

    return None
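

# Usage sketch (illustrative; `MyTransport` is a hypothetical name): the
# `transport` option of `sentry_sdk.init` drives `make_transport`. A custom
# transport is best provided as a `Transport` subclass; a bare function still
# works, but goes through the deprecated `_FunctionTransport` path above:
#
#     class MyTransport(Transport):
#         def capture_envelope(self, envelope):
#             ...  # hand the envelope to your own delivery mechanism
#
#     sentry_sdk.init(dsn="...", transport=MyTransport)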
sentry-python-2.18.0/sentry_sdk/types.py000066400000000000000000000014401471214654000203530ustar00rootroot00000000000000"""
This module contains type definitions for the Sentry SDK's public API.
The types are re-exported from the internal module `sentry_sdk._types`.

Disclaimer: Since types are a form of documentation, type definitions
may change in minor releases. Removing a type would be considered a
breaking change, and so we will only remove type definitions in major
releases.
"""

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from sentry_sdk._types import Event, EventDataCategory, Hint
else:
    from typing import Any

    # The lines below allow the types to be imported from outside `if TYPE_CHECKING`
    # guards. The types in this module are only intended to be used for type hints.
    Event = Any
    EventDataCategory = Any
    Hint = Any

__all__ = ("Event", "EventDataCategory", "Hint")
sentry-python-2.18.0/sentry_sdk/utils.py000066400000000000000000001603431471214654000203570ustar00rootroot00000000000000import base64
import json
import linecache
import logging
import math
import os
import random
import re
import subprocess
import sys
import threading
import time

from collections import namedtuple
from datetime import datetime, timezone
from decimal import Decimal
from functools import partial, partialmethod, wraps
from numbers import Real
from urllib.parse import parse_qs, unquote, urlencode, urlsplit, urlunsplit

try:
    # Python 3.11
    from builtins import BaseExceptionGroup
except ImportError:
    # Python 3.10 and below
    BaseExceptionGroup = None  # type: ignore

import sentry_sdk
from sentry_sdk._compat import PY37
from sentry_sdk.consts import DEFAULT_MAX_VALUE_LENGTH, EndpointType

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from types import FrameType, TracebackType
    from typing import (
        Any,
        Callable,
        cast,
        ContextManager,
        Dict,
        Iterator,
        List,
        NoReturn,
        Optional,
        overload,
        ParamSpec,
        Set,
        Tuple,
        Type,
        TypeVar,
        Union,
    )

    from gevent.hub import Hub

    from sentry_sdk._types import Event, ExcInfo

    P = ParamSpec("P")
    R = TypeVar("R")


epoch = datetime(1970, 1, 1)

# The logger is created here but initialized in the debug support module
logger = logging.getLogger("sentry_sdk.errors")

_installed_modules = None

BASE64_ALPHABET = re.compile(r"^[a-zA-Z0-9/+=]*$")

SENSITIVE_DATA_SUBSTITUTE = "[Filtered]"

FALSY_ENV_VALUES = frozenset(("false", "f", "n", "no", "off", "0"))
TRUTHY_ENV_VALUES = frozenset(("true", "t", "y", "yes", "on", "1"))


def env_to_bool(value, *, strict=False):
    # type: (Any, Optional[bool]) -> bool | None
    """Casts an ENV variable value to boolean using the constants defined above.
    In strict mode, it may return None if the value doesn't match any of the predefined values.
    """
    normalized = str(value).lower() if value is not None else None

    if normalized in FALSY_ENV_VALUES:
        return False

    if normalized in TRUTHY_ENV_VALUES:
        return True

    return None if strict else bool(value)
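

# Illustrative examples (derived from the constants above):
#
#     env_to_bool("YES")                 -> True
#     env_to_bool("off")                 -> False
#     env_to_bool("maybe")               -> True   (non-strict falls back to bool())
#     env_to_bool("maybe", strict=True)  -> None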
""" normalized = str(value).lower() if value is not None else None if normalized in FALSY_ENV_VALUES: return False if normalized in TRUTHY_ENV_VALUES: return True return None if strict else bool(value) def json_dumps(data): # type: (Any) -> bytes """Serialize data into a compact JSON representation encoded as UTF-8.""" return json.dumps(data, allow_nan=False, separators=(",", ":")).encode("utf-8") def get_git_revision(): # type: () -> Optional[str] try: with open(os.path.devnull, "w+") as null: # prevent command prompt windows from popping up on windows startupinfo = None if sys.platform == "win32" or sys.platform == "cygwin": startupinfo = subprocess.STARTUPINFO() startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW revision = ( subprocess.Popen( ["git", "rev-parse", "HEAD"], startupinfo=startupinfo, stdout=subprocess.PIPE, stderr=null, stdin=null, ) .communicate()[0] .strip() .decode("utf-8") ) except (OSError, IOError, FileNotFoundError): return None return revision def get_default_release(): # type: () -> Optional[str] """Try to guess a default release.""" release = os.environ.get("SENTRY_RELEASE") if release: return release release = get_git_revision() if release: return release for var in ( "HEROKU_SLUG_COMMIT", "SOURCE_VERSION", "CODEBUILD_RESOLVED_SOURCE_VERSION", "CIRCLE_SHA1", "GAE_DEPLOYMENT_ID", ): release = os.environ.get(var) if release: return release return None def get_sdk_name(installed_integrations): # type: (List[str]) -> str """Return the SDK name including the name of the used web framework.""" # Note: I can not use for example sentry_sdk.integrations.django.DjangoIntegration.identifier # here because if django is not installed the integration is not accessible. framework_integrations = [ "django", "flask", "fastapi", "bottle", "falcon", "quart", "sanic", "starlette", "litestar", "starlite", "chalice", "serverless", "pyramid", "tornado", "aiohttp", "aws_lambda", "gcp", "beam", "asgi", "wsgi", ] for integration in framework_integrations: if integration in installed_integrations: return "sentry.python.{}".format(integration) return "sentry.python" class CaptureInternalException: __slots__ = () def __enter__(self): # type: () -> ContextManager[Any] return self def __exit__(self, ty, value, tb): # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]) -> bool if ty is not None and value is not None: capture_internal_exception((ty, value, tb)) return True _CAPTURE_INTERNAL_EXCEPTION = CaptureInternalException() def capture_internal_exceptions(): # type: () -> ContextManager[Any] return _CAPTURE_INTERNAL_EXCEPTION def capture_internal_exception(exc_info): # type: (ExcInfo) -> None """ Capture an exception that is likely caused by a bug in the SDK itself. These exceptions do not end up in Sentry and are just logged instead. """ if sentry_sdk.get_client().is_active(): logger.error("Internal error in sentry_sdk", exc_info=exc_info) def to_timestamp(value): # type: (datetime) -> float return (value - epoch).total_seconds() def format_timestamp(value): # type: (datetime) -> str """Formats a timestamp in RFC 3339 format. Any datetime objects with a non-UTC timezone are converted to UTC, so that all timestamps are formatted in UTC. """ utctime = value.astimezone(timezone.utc) # We use this custom formatting rather than isoformat for backwards compatibility (we have used this format for # several years now), and isoformat is slightly different. 


ISO_TZ_SEPARATORS = frozenset(("+", "-"))


def datetime_from_isoformat(value):
    # type: (str) -> datetime
    try:
        result = datetime.fromisoformat(value)
    except (AttributeError, ValueError):
        # py 3.6
        timestamp_format = (
            "%Y-%m-%dT%H:%M:%S.%f" if "." in value else "%Y-%m-%dT%H:%M:%S"
        )
        if value.endswith("Z"):
            value = value[:-1] + "+0000"

        if value[-6] in ISO_TZ_SEPARATORS:
            timestamp_format += "%z"
            value = value[:-3] + value[-2:]
        elif value[-5] in ISO_TZ_SEPARATORS:
            timestamp_format += "%z"

        result = datetime.strptime(value, timestamp_format)
    return result.astimezone(timezone.utc)


def event_hint_with_exc_info(exc_info=None):
    # type: (Optional[ExcInfo]) -> Dict[str, Optional[ExcInfo]]
    """Creates a hint with the exc info filled in."""
    if exc_info is None:
        exc_info = sys.exc_info()
    else:
        exc_info = exc_info_from_error(exc_info)
    if exc_info[0] is None:
        exc_info = None
    return {"exc_info": exc_info}


class BadDsn(ValueError):
    """Raised on invalid DSNs."""


class Dsn:
    """Represents a DSN."""

    def __init__(self, value):
        # type: (Union[Dsn, str]) -> None
        if isinstance(value, Dsn):
            self.__dict__ = dict(value.__dict__)
            return
        parts = urlsplit(str(value))

        if parts.scheme not in ("http", "https"):
            raise BadDsn("Unsupported scheme %r" % parts.scheme)
        self.scheme = parts.scheme

        if parts.hostname is None:
            raise BadDsn("Missing hostname")

        self.host = parts.hostname

        if parts.port is None:
            self.port = self.scheme == "https" and 443 or 80  # type: int
        else:
            self.port = parts.port

        if not parts.username:
            raise BadDsn("Missing public key")

        self.public_key = parts.username
        self.secret_key = parts.password

        path = parts.path.rsplit("/", 1)

        try:
            self.project_id = str(int(path.pop()))
        except (ValueError, TypeError):
            raise BadDsn("Invalid project in DSN (%r)" % (parts.path or "")[1:])

        self.path = "/".join(path) + "/"

    @property
    def netloc(self):
        # type: () -> str
        """The netloc part of a DSN."""
        rv = self.host
        if (self.scheme, self.port) not in (("http", 80), ("https", 443)):
            rv = "%s:%s" % (rv, self.port)
        return rv

    def to_auth(self, client=None):
        # type: (Optional[Any]) -> Auth
        """Returns the auth info object for this dsn."""
        return Auth(
            scheme=self.scheme,
            host=self.netloc,
            path=self.path,
            project_id=self.project_id,
            public_key=self.public_key,
            secret_key=self.secret_key,
            client=client,
        )

    def __str__(self):
        # type: () -> str
        return "%s://%s%s@%s%s%s" % (
            self.scheme,
            self.public_key,
            self.secret_key and ":" + self.secret_key or "",
            self.netloc,
            self.path,
            self.project_id,
        )
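

# Illustrative example (hypothetical DSN): parsing exposes the DSN's parts:
#
#     dsn = Dsn("https://abc123@o42.ingest.sentry.io/1234")
#     dsn.scheme      -> "https"
#     dsn.host        -> "o42.ingest.sentry.io"
#     dsn.public_key  -> "abc123"
#     dsn.project_id  -> "1234"
#     dsn.netloc      -> "o42.ingest.sentry.io"  (the default https port 443 is omitted)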


class Auth:
    """Helper object that represents the auth info."""

    def __init__(
        self,
        scheme,
        host,
        project_id,
        public_key,
        secret_key=None,
        version=7,
        client=None,
        path="/",
    ):
        # type: (str, str, str, str, Optional[str], int, Optional[Any], str) -> None
        self.scheme = scheme
        self.host = host
        self.path = path
        self.project_id = project_id
        self.public_key = public_key
        self.secret_key = secret_key
        self.version = version
        self.client = client

    def get_api_url(
        self, type=EndpointType.ENVELOPE  # type: EndpointType
    ):
        # type: (...) -> str
        """Returns the API url for storing events."""
        return "%s://%s%sapi/%s/%s/" % (
            self.scheme,
            self.host,
            self.path,
            self.project_id,
            type.value,
        )

    def to_header(self):
        # type: () -> str
        """Returns the auth header as a string."""
        rv = [("sentry_key", self.public_key), ("sentry_version", self.version)]
        if self.client is not None:
            rv.append(("sentry_client", self.client))
        if self.secret_key is not None:
            rv.append(("sentry_secret", self.secret_key))
        return "Sentry " + ", ".join("%s=%s" % (key, value) for key, value in rv)


class AnnotatedValue:
    """
    Meta information for a data field in the event payload.
    This is to tell Relay that we have tampered with the fields value.
    See:
    https://github.com/getsentry/relay/blob/be12cd49a0f06ea932ed9b9f93a655de5d6ad6d1/relay-general/src/types/meta.rs#L407-L423
    """

    __slots__ = ("value", "metadata")

    def __init__(self, value, metadata):
        # type: (Optional[Any], Dict[str, Any]) -> None
        self.value = value
        self.metadata = metadata

    def __eq__(self, other):
        # type: (Any) -> bool
        if not isinstance(other, AnnotatedValue):
            return False

        return self.value == other.value and self.metadata == other.metadata

    @classmethod
    def removed_because_raw_data(cls):
        # type: () -> AnnotatedValue
        """The value was removed because it could not be parsed. This is done for request body values that are not json nor a form."""
        return AnnotatedValue(
            value="",
            metadata={
                "rem": [  # Remark
                    [
                        "!raw",  # Unparsable raw data
                        "x",  # The fields original value was removed
                    ]
                ]
            },
        )

    @classmethod
    def removed_because_over_size_limit(cls):
        # type: () -> AnnotatedValue
        """The actual value was removed because the size of the field exceeded the configured maximum size (specified with the max_request_body_size sdk option)"""
        return AnnotatedValue(
            value="",
            metadata={
                "rem": [  # Remark
                    [
                        "!config",  # Because of configured maximum size
                        "x",  # The fields original value was removed
                    ]
                ]
            },
        )

    @classmethod
    def substituted_because_contains_sensitive_data(cls):
        # type: () -> AnnotatedValue
        """The actual value was removed because it contained sensitive information."""
        return AnnotatedValue(
            value=SENSITIVE_DATA_SUBSTITUTE,
            metadata={
                "rem": [  # Remark
                    [
                        "!config",  # Because of SDK configuration (in this case the config is the hard coded removal of certain django cookies)
                        "s",  # The fields original value was substituted
                    ]
                ]
            },
        )


if TYPE_CHECKING:
    from typing import TypeVar

    T = TypeVar("T")
    Annotated = Union[AnnotatedValue, T]


def get_type_name(cls):
    # type: (Optional[type]) -> Optional[str]
    return getattr(cls, "__qualname__", None) or getattr(cls, "__name__", None)


def get_type_module(cls):
    # type: (Optional[type]) -> Optional[str]
    mod = getattr(cls, "__module__", None)
    if mod not in (None, "builtins", "__builtins__"):
        return mod
    return None


def should_hide_frame(frame):
    # type: (FrameType) -> bool
    try:
        mod = frame.f_globals["__name__"]
        if mod.startswith("sentry_sdk."):
            return True
    except (AttributeError, KeyError):
        pass

    for flag_name in "__traceback_hide__", "__tracebackhide__":
        try:
            if frame.f_locals[flag_name]:
                return True
        except Exception:
            pass

    return False
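

# Integration code can opt a frame out of reported stack traces by setting one
# of the flags checked above as a local variable (hypothetical helper shown):
#
#     def _internal_helper():
#         __tracebackhide__ = True  # this frame is skipped by should_hide_frame()
#         ...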


def iter_stacks(tb):
    # type: (Optional[TracebackType]) -> Iterator[TracebackType]
    tb_ = tb  # type: Optional[TracebackType]
    while tb_ is not None:
        if not should_hide_frame(tb_.tb_frame):
            yield tb_
        tb_ = tb_.tb_next


def get_lines_from_file(
    filename,  # type: str
    lineno,  # type: int
    max_length=None,  # type: Optional[int]
    loader=None,  # type: Optional[Any]
    module=None,  # type: Optional[str]
):
    # type: (...) -> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]]
    context_lines = 5

    source = None
    if loader is not None and hasattr(loader, "get_source"):
        try:
            source_str = loader.get_source(module)  # type: Optional[str]
        except (ImportError, IOError):
            source_str = None
        if source_str is not None:
            source = source_str.splitlines()

    if source is None:
        try:
            source = linecache.getlines(filename)
        except (OSError, IOError):
            return [], None, []

    if not source:
        return [], None, []

    lower_bound = max(0, lineno - context_lines)
    upper_bound = min(lineno + 1 + context_lines, len(source))

    try:
        pre_context = [
            strip_string(line.strip("\r\n"), max_length=max_length)
            for line in source[lower_bound:lineno]
        ]

        context_line = strip_string(source[lineno].strip("\r\n"), max_length=max_length)

        post_context = [
            strip_string(line.strip("\r\n"), max_length=max_length)
            for line in source[(lineno + 1) : upper_bound]
        ]

        return pre_context, context_line, post_context
    except IndexError:
        # the file may have changed since it was loaded into memory
        return [], None, []


def get_source_context(
    frame,  # type: FrameType
    tb_lineno,  # type: int
    max_value_length=None,  # type: Optional[int]
):
    # type: (...) -> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]]
    try:
        abs_path = frame.f_code.co_filename  # type: Optional[str]
    except Exception:
        abs_path = None
    try:
        module = frame.f_globals["__name__"]
    except Exception:
        return [], None, []
    try:
        loader = frame.f_globals["__loader__"]
    except Exception:
        loader = None

    lineno = tb_lineno - 1
    if lineno is not None and abs_path:
        return get_lines_from_file(
            abs_path, lineno, max_value_length, loader=loader, module=module
        )
    return [], None, []


def safe_str(value):
    # type: (Any) -> str
    try:
        return str(value)
    except Exception:
        return safe_repr(value)


def safe_repr(value):
    # type: (Any) -> str
    try:
        return repr(value)
    except Exception:
        return "<broken repr>"


def filename_for_module(module, abs_path):
    # type: (Optional[str], Optional[str]) -> Optional[str]
    if not abs_path or not module:
        return abs_path

    try:
        if abs_path.endswith(".pyc"):
            abs_path = abs_path[:-1]

        base_module = module.split(".", 1)[0]
        if base_module == module:
            return os.path.basename(abs_path)

        base_module_path = sys.modules[base_module].__file__
        if not base_module_path:
            return abs_path

        return abs_path.split(base_module_path.rsplit(os.sep, 2)[0], 1)[-1].lstrip(
            os.sep
        )
    except Exception:
        return abs_path


def serialize_frame(
    frame,
    tb_lineno=None,
    include_local_variables=True,
    include_source_context=True,
    max_value_length=None,
    custom_repr=None,
):
    # type: (FrameType, Optional[int], bool, bool, Optional[int], Optional[Callable[..., Optional[str]]]) -> Dict[str, Any]
    f_code = getattr(frame, "f_code", None)
    if not f_code:
        abs_path = None
        function = None
    else:
        abs_path = frame.f_code.co_filename
        function = frame.f_code.co_name
    try:
        module = frame.f_globals["__name__"]
    except Exception:
        module = None

    if tb_lineno is None:
        tb_lineno = frame.f_lineno

    rv = {
        "filename": filename_for_module(module, abs_path) or None,
        "abs_path": os.path.abspath(abs_path) if abs_path else None,
        "function": function or "<unknown>",
        "module": module,
        "lineno": tb_lineno,
    }  # type: Dict[str, Any]

    if include_source_context:
        rv["pre_context"], rv["context_line"], rv["post_context"] = get_source_context(
            frame, tb_lineno, max_value_length
        )

    if include_local_variables:
        from sentry_sdk.serializer import serialize

        rv["vars"] = serialize(
            dict(frame.f_locals), is_vars=True, custom_repr=custom_repr
        )

    return rv


def current_stacktrace(
    include_local_variables=True,  # type: bool
    include_source_context=True,  # type: bool
    max_value_length=None,  # type: Optional[int]
):
    # type: (...) -> Dict[str, Any]
    __tracebackhide__ = True
    frames = []

    f = sys._getframe()  # type: Optional[FrameType]
    while f is not None:
        if not should_hide_frame(f):
            frames.append(
                serialize_frame(
                    f,
                    include_local_variables=include_local_variables,
                    include_source_context=include_source_context,
                    max_value_length=max_value_length,
                )
            )
        f = f.f_back

    frames.reverse()

    return {"frames": frames}


def get_errno(exc_value):
    # type: (BaseException) -> Optional[Any]
    return getattr(exc_value, "errno", None)


def get_error_message(exc_value):
    # type: (Optional[BaseException]) -> str
    message = (
        getattr(exc_value, "message", "")
        or getattr(exc_value, "detail", "")
        or safe_str(exc_value)
    )  # type: str

    # __notes__ should be a list of strings when notes are added
    # via add_note, but can be anything else if __notes__ is set
    # directly. We only support strings in __notes__, since that
    # is the correct use.
    notes = getattr(exc_value, "__notes__", None)  # type: object
    if isinstance(notes, list) and len(notes) > 0:
        message += "\n" + "\n".join(note for note in notes if isinstance(note, str))

    return message


def single_exception_from_error_tuple(
    exc_type,  # type: Optional[type]
    exc_value,  # type: Optional[BaseException]
    tb,  # type: Optional[TracebackType]
    client_options=None,  # type: Optional[Dict[str, Any]]
    mechanism=None,  # type: Optional[Dict[str, Any]]
    exception_id=None,  # type: Optional[int]
    parent_id=None,  # type: Optional[int]
    source=None,  # type: Optional[str]
):
    # type: (...) -> Dict[str, Any]
    """
    Creates a dict that goes into the events `exception.values` list and is ingestible by Sentry.

    See the Exception Interface documentation for more details:
    https://develop.sentry.dev/sdk/event-payloads/exception/
    """
    exception_value = {}  # type: Dict[str, Any]
    exception_value["mechanism"] = (
        mechanism.copy() if mechanism else {"type": "generic", "handled": True}
    )
    if exception_id is not None:
        exception_value["mechanism"]["exception_id"] = exception_id

    if exc_value is not None:
        errno = get_errno(exc_value)
    else:
        errno = None

    if errno is not None:
        exception_value["mechanism"].setdefault("meta", {}).setdefault(
            "errno", {}
        ).setdefault("number", errno)

    if source is not None:
        exception_value["mechanism"]["source"] = source

    is_root_exception = exception_id == 0
    if not is_root_exception and parent_id is not None:
        exception_value["mechanism"]["parent_id"] = parent_id
        exception_value["mechanism"]["type"] = "chained"

    if is_root_exception and "type" not in exception_value["mechanism"]:
        exception_value["mechanism"]["type"] = "generic"

    is_exception_group = BaseExceptionGroup is not None and isinstance(
        exc_value, BaseExceptionGroup
    )
    if is_exception_group:
        exception_value["mechanism"]["is_exception_group"] = True

    exception_value["module"] = get_type_module(exc_type)
    exception_value["type"] = get_type_name(exc_type)
    exception_value["value"] = get_error_message(exc_value)

    if client_options is None:
        include_local_variables = True
        include_source_context = True
        max_value_length = DEFAULT_MAX_VALUE_LENGTH  # fallback
        custom_repr = None
    else:
        include_local_variables = client_options["include_local_variables"]
        include_source_context = client_options["include_source_context"]
        max_value_length = client_options["max_value_length"]
        custom_repr = client_options.get("custom_repr")

    frames = [
        serialize_frame(
            tb.tb_frame,
            tb_lineno=tb.tb_lineno,
            include_local_variables=include_local_variables,
            include_source_context=include_source_context,
            max_value_length=max_value_length,
            custom_repr=custom_repr,
        )
        for tb in iter_stacks(tb)
    ]

    if frames:
        exception_value["stacktrace"] = {"frames": frames}

    return exception_value


HAS_CHAINED_EXCEPTIONS = hasattr(Exception, "__suppress_context__")

if HAS_CHAINED_EXCEPTIONS:

    def walk_exception_chain(exc_info):
        # type: (ExcInfo) -> Iterator[ExcInfo]
        exc_type, exc_value, tb = exc_info

        seen_exceptions = []
        seen_exception_ids = set()  # type: Set[int]

        while (
            exc_type is not None
            and exc_value is not None
            and id(exc_value) not in seen_exception_ids
        ):
            yield exc_type, exc_value, tb

            # Avoid hashing random types we don't know anything
            # about. Use the list to keep a ref so that the `id` is
            # not used for another object.
            seen_exceptions.append(exc_value)
            seen_exception_ids.add(id(exc_value))

            if exc_value.__suppress_context__:
                cause = exc_value.__cause__
            else:
                cause = exc_value.__context__
            if cause is None:
                break
            exc_type = type(cause)
            exc_value = cause
            tb = getattr(cause, "__traceback__", None)

else:

    def walk_exception_chain(exc_info):
        # type: (ExcInfo) -> Iterator[ExcInfo]
        yield exc_info


def exceptions_from_error(
    exc_type,  # type: Optional[type]
    exc_value,  # type: Optional[BaseException]
    tb,  # type: Optional[TracebackType]
    client_options=None,  # type: Optional[Dict[str, Any]]
    mechanism=None,  # type: Optional[Dict[str, Any]]
    exception_id=0,  # type: int
    parent_id=0,  # type: int
    source=None,  # type: Optional[str]
):
    # type: (...) -> Tuple[int, List[Dict[str, Any]]]
    """
    Creates the list of exceptions.
    This can include chained exceptions and exceptions from an ExceptionGroup.

    See the Exception Interface documentation for more details:
    https://develop.sentry.dev/sdk/event-payloads/exception/
    """

    parent = single_exception_from_error_tuple(
        exc_type=exc_type,
        exc_value=exc_value,
        tb=tb,
        client_options=client_options,
        mechanism=mechanism,
        exception_id=exception_id,
        parent_id=parent_id,
        source=source,
    )
    exceptions = [parent]

    parent_id = exception_id
    exception_id += 1

    should_suppress_context = (
        hasattr(exc_value, "__suppress_context__") and exc_value.__suppress_context__  # type: ignore
    )
    if should_suppress_context:
        # Add direct cause.
        # The field `__cause__` is set when raised with the exception (using the `from` keyword).
        exception_has_cause = (
            exc_value
            and hasattr(exc_value, "__cause__")
            and exc_value.__cause__ is not None
        )
        if exception_has_cause:
            cause = exc_value.__cause__  # type: ignore
            (exception_id, child_exceptions) = exceptions_from_error(
                exc_type=type(cause),
                exc_value=cause,
                tb=getattr(cause, "__traceback__", None),
                client_options=client_options,
                mechanism=mechanism,
                exception_id=exception_id,
                source="__cause__",
            )
            exceptions.extend(child_exceptions)

    else:
        # Add indirect cause.
        # The field `__context__` is assigned if another exception occurs while handling the exception.
        exception_has_context = (
            exc_value
            and hasattr(exc_value, "__context__")
            and exc_value.__context__ is not None
        )
        if exception_has_context:
            context = exc_value.__context__  # type: ignore
            (exception_id, child_exceptions) = exceptions_from_error(
                exc_type=type(context),
                exc_value=context,
                tb=getattr(context, "__traceback__", None),
                client_options=client_options,
                mechanism=mechanism,
                exception_id=exception_id,
                source="__context__",
            )
            exceptions.extend(child_exceptions)

    # Add exceptions from an ExceptionGroup.
    is_exception_group = exc_value and hasattr(exc_value, "exceptions")
    if is_exception_group:
        for idx, e in enumerate(exc_value.exceptions):  # type: ignore
            (exception_id, child_exceptions) = exceptions_from_error(
                exc_type=type(e),
                exc_value=e,
                tb=getattr(e, "__traceback__", None),
                client_options=client_options,
                mechanism=mechanism,
                exception_id=exception_id,
                parent_id=parent_id,
                source="exceptions[%s]" % idx,
            )
            exceptions.extend(child_exceptions)

    return (exception_id, exceptions)


def exceptions_from_error_tuple(
    exc_info,  # type: ExcInfo
    client_options=None,  # type: Optional[Dict[str, Any]]
    mechanism=None,  # type: Optional[Dict[str, Any]]
):
    # type: (...) -> List[Dict[str, Any]]
    exc_type, exc_value, tb = exc_info

    is_exception_group = BaseExceptionGroup is not None and isinstance(
        exc_value, BaseExceptionGroup
    )

    if is_exception_group:
        (_, exceptions) = exceptions_from_error(
            exc_type=exc_type,
            exc_value=exc_value,
            tb=tb,
            client_options=client_options,
            mechanism=mechanism,
            exception_id=0,
            parent_id=0,
        )

    else:
        exceptions = []
        for exc_type, exc_value, tb in walk_exception_chain(exc_info):
            exceptions.append(
                single_exception_from_error_tuple(
                    exc_type, exc_value, tb, client_options, mechanism
                )
            )

    exceptions.reverse()

    return exceptions


def to_string(value):
    # type: (str) -> str
    try:
        return str(value)
    except UnicodeDecodeError:
        return repr(value)[1:-1]


def iter_event_stacktraces(event):
    # type: (Event) -> Iterator[Dict[str, Any]]
    if "stacktrace" in event:
        yield event["stacktrace"]

    if "threads" in event:
        for thread in event["threads"].get("values") or ():
            if "stacktrace" in thread:
                yield thread["stacktrace"]

    if "exception" in event:
        for exception in event["exception"].get("values") or ():
            if "stacktrace" in exception:
                yield exception["stacktrace"]


def iter_event_frames(event):
    # type: (Event) -> Iterator[Dict[str, Any]]
    for stacktrace in iter_event_stacktraces(event):
        for frame in stacktrace.get("frames") or ():
            yield frame


def handle_in_app(event, in_app_exclude=None, in_app_include=None, project_root=None):
    # type: (Event, Optional[List[str]], Optional[List[str]], Optional[str]) -> Event
    for stacktrace in iter_event_stacktraces(event):
        set_in_app_in_frames(
            stacktrace.get("frames"),
            in_app_exclude=in_app_exclude,
            in_app_include=in_app_include,
            project_root=project_root,
        )

    return event


def set_in_app_in_frames(frames, in_app_exclude, in_app_include, project_root=None):
    # type: (Any, Optional[List[str]], Optional[List[str]], Optional[str]) -> Optional[Any]
    if not frames:
        return None

    for frame in frames:
        # if frame has already been marked as in_app, skip it
        current_in_app = frame.get("in_app")
        if current_in_app is not None:
            continue

        module = frame.get("module")

        # check if module in frame is in the list of modules to include
        if _module_in_list(module, in_app_include):
            frame["in_app"] = True
            continue

        # check if module in frame is in the list of modules to exclude
        if _module_in_list(module, in_app_exclude):
            frame["in_app"] = False
            continue

        # if frame has no abs_path, skip further checks
        abs_path = frame.get("abs_path")
        if abs_path is None:
            continue

        if _is_external_source(abs_path):
            frame["in_app"] = False
            continue

        if _is_in_project_root(abs_path, project_root):
            frame["in_app"] = True
            continue

    return frames


def exc_info_from_error(error):
    # type: (Union[BaseException, ExcInfo]) -> ExcInfo
    if isinstance(error, tuple) and len(error) == 3:
        exc_type, exc_value, tb = error
    elif isinstance(error, BaseException):
        tb = getattr(error, "__traceback__", None)
        if tb is not None:
            exc_type = type(error)
            exc_value = error
        else:
            exc_type, exc_value, tb = sys.exc_info()
            if exc_value is not error:
                tb = None
                exc_value = error
                exc_type = type(error)

    else:
        raise ValueError("Expected Exception object to report, got %s!" % type(error))

    exc_info = (exc_type, exc_value, tb)

    if TYPE_CHECKING:
        # This cast is safe because exc_type and exc_value are either both
        # None or both not None.
        exc_info = cast(ExcInfo, exc_info)

    return exc_info


def event_from_exception(
    exc_info,  # type: Union[BaseException, ExcInfo]
    client_options=None,  # type: Optional[Dict[str, Any]]
    mechanism=None,  # type: Optional[Dict[str, Any]]
):
    # type: (...) -> Tuple[Event, Dict[str, Any]]
    exc_info = exc_info_from_error(exc_info)
    hint = event_hint_with_exc_info(exc_info)

    return (
        {
            "level": "error",
            "exception": {
                "values": exceptions_from_error_tuple(
                    exc_info, client_options, mechanism
                )
            },
        },
        hint,
    )


def _module_in_list(name, items):
    # type: (Optional[str], Optional[List[str]]) -> bool
    if name is None:
        return False

    if not items:
        return False

    for item in items:
        if item == name or name.startswith(item + "."):
            return True

    return False


def _is_external_source(abs_path):
    # type: (Optional[str]) -> bool
    # check if frame is in 'site-packages' or 'dist-packages'
    if abs_path is None:
        return False

    external_source = (
        re.search(r"[\\/](?:dist|site)-packages[\\/]", abs_path) is not None
    )
    return external_source


def _is_in_project_root(abs_path, project_root):
    # type: (Optional[str], Optional[str]) -> bool
    if abs_path is None or project_root is None:
        return False

    # check if path is in the project root
    if abs_path.startswith(project_root):
        return True

    return False


def _truncate_by_bytes(string, max_bytes):
    # type: (str, int) -> str
    """
    Truncate a UTF-8-encodable string to the last full codepoint so that it fits in max_bytes.
    """
    truncated = string.encode("utf-8")[: max_bytes - 3].decode("utf-8", errors="ignore")

    return truncated + "..."


def _get_size_in_bytes(value):
    # type: (str) -> Optional[int]
    try:
        return len(value.encode("utf-8"))
    except (UnicodeEncodeError, UnicodeDecodeError):
        return None


def strip_string(value, max_length=None):
    # type: (str, Optional[int]) -> Union[AnnotatedValue, str]
    if not value:
        return value

    if max_length is None:
        max_length = DEFAULT_MAX_VALUE_LENGTH

    byte_size = _get_size_in_bytes(value)
    text_size = len(value)

    if byte_size is not None and byte_size > max_length:
        # truncate to max_length bytes, preserving code points
        truncated_value = _truncate_by_bytes(value, max_length)
    elif text_size is not None and text_size > max_length:
        # fallback to truncating by string length
        truncated_value = value[: max_length - 3] + "..."
    else:
        return value

    return AnnotatedValue(
        value=truncated_value,
        metadata={
            "len": byte_size or text_size,
            "rem": [["!limit", "x", max_length - 3, max_length]],
        },
    )


def parse_version(version):
    # type: (str) -> Optional[Tuple[int, ...]]
    """
    Parses a version string into a tuple of integers.
    This uses the parsing logic from PEP 440:
    https://peps.python.org/pep-0440/#appendix-b-parsing-version-strings-with-regular-expressions
    """
    VERSION_PATTERN = r"""  # noqa: N806
        v?
        (?:
            (?:(?P<epoch>[0-9]+)!)?                           # epoch
            (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
            (?P<pre>                                          # pre-release
                [-_\.]?
                (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
                [-_\.]?
                (?P<pre_n>[0-9]+)?
            )?
            (?P<post>                                         # post release
                (?:-(?P<post_n1>[0-9]+))
                |
                (?:
                    [-_\.]?
                    (?P<post_l>post|rev|r)
                    [-_\.]?
                    (?P<post_n2>[0-9]+)?
                )
            )?
            (?P<dev>                                          # dev release
                [-_\.]?
                (?P<dev_l>dev)
                [-_\.]?
                (?P<dev_n>[0-9]+)?
            )?
        )
        (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
    """

    pattern = re.compile(
        r"^\s*" + VERSION_PATTERN + r"\s*$",
        re.VERBOSE | re.IGNORECASE,
    )

    try:
        release = pattern.match(version).groupdict()["release"]  # type: ignore
        release_tuple = tuple(map(int, release.split(".")[:3]))  # type: Tuple[int, ...]
    except (TypeError, ValueError, AttributeError):
        return None

    return release_tuple
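
# Illustrative examples:
#
#     parse_version("2.18.0")        -> (2, 18, 0)
#     parse_version("1.0")           -> (1, 0)
#     parse_version("2.0.0rc1")      -> (2, 0, 0)  (pre-release suffix is ignored)
#     parse_version("not-a-version") -> None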


def _is_contextvars_broken():
    # type: () -> bool
    """
    Returns whether gevent/eventlet have patched the stdlib in a way where thread locals are now more "correct" than contextvars.
    """
    try:
        import gevent
        from gevent.monkey import is_object_patched

        # Get the MAJOR and MINOR version numbers of Gevent
        version_tuple = tuple(
            [int(part) for part in re.split(r"a|b|rc|\.", gevent.__version__)[:2]]
        )
        if is_object_patched("threading", "local"):
            # Gevent 20.9.0 depends on Greenlet 0.4.17 which natively handles switching
            # context vars when greenlets are switched, so, Gevent 20.9.0+ is all fine.
            # Ref: https://github.com/gevent/gevent/blob/83c9e2ae5b0834b8f84233760aabe82c3ba065b4/src/gevent/monkey.py#L604-L609
            # Gevent 20.5, that doesn't depend on Greenlet 0.4.17 with native support
            # for contextvars, is able to patch both thread locals and contextvars, in
            # that case, check if contextvars are effectively patched.
            if (
                # Gevent 20.9.0+
                (sys.version_info >= (3, 7) and version_tuple >= (20, 9))
                # Gevent 20.5.0+ or Python < 3.7
                or (is_object_patched("contextvars", "ContextVar"))
            ):
                return False

            return True
    except ImportError:
        pass

    try:
        import greenlet
        from eventlet.patcher import is_monkey_patched  # type: ignore

        greenlet_version = parse_version(greenlet.__version__)

        if greenlet_version is None:
            logger.error(
                "Internal error in Sentry SDK: Could not parse Greenlet version from greenlet.__version__."
            )
            return False

        if is_monkey_patched("thread") and greenlet_version < (0, 5):
            return True
    except ImportError:
        pass

    return False


def _make_threadlocal_contextvars(local):
    # type: (type) -> type
    class ContextVar:
        # Super-limited impl of ContextVar

        def __init__(self, name, default=None):
            # type: (str, Any) -> None
            self._name = name
            self._default = default
            self._local = local()
            self._original_local = local()

        def get(self, default=None):
            # type: (Any) -> Any
            return getattr(self._local, "value", default or self._default)

        def set(self, value):
            # type: (Any) -> Any
            token = str(random.getrandbits(64))
            original_value = self.get()
            setattr(self._original_local, token, original_value)
            self._local.value = value
            return token

        def reset(self, token):
            # type: (Any) -> None
            self._local.value = getattr(self._original_local, token)
            # delete the original value (this way it works in Python 3.6+)
            del self._original_local.__dict__[token]

    return ContextVar


def _get_contextvars():
    # type: () -> Tuple[bool, type]
    """
    Figure out the "right" contextvars installation to use. Returns a
    `contextvars.ContextVar`-like class with a limited API.

    See https://docs.sentry.io/platforms/python/contextvars/ for more information.
    """
    if not _is_contextvars_broken():
        # aiocontextvars is a PyPI package that ensures that the contextvars
        # backport (also a PyPI package) works with asyncio under Python 3.6
        #
        # Import it if available.
        if sys.version_info < (3, 7):
            # `aiocontextvars` is absolutely required for functional
            # contextvars on Python 3.6.
            try:
                from aiocontextvars import ContextVar

                return True, ContextVar
            except ImportError:
                pass
        else:
            # On Python 3.7 contextvars are functional.
            try:
                from contextvars import ContextVar

                return True, ContextVar
            except ImportError:
                pass

    # Fall back to basic thread-local usage.

    from threading import local

    return False, _make_threadlocal_contextvars(local)


HAS_REAL_CONTEXTVARS, ContextVar = _get_contextvars()

CONTEXTVARS_ERROR_MESSAGE = """

With asyncio/ASGI applications, the Sentry SDK requires a functional
installation of `contextvars` to avoid leaking scope/context data across
requests.

Please refer to https://docs.sentry.io/platforms/python/contextvars/ for more information.
"""


def qualname_from_function(func):
    # type: (Callable[..., Any]) -> Optional[str]
    """Return the qualified name of func. Works with regular function, lambda, partial and partialmethod."""
    func_qualname = None  # type: Optional[str]

    # Python 2
    try:
        return "%s.%s.%s" % (
            func.im_class.__module__,  # type: ignore
            func.im_class.__name__,  # type: ignore
            func.__name__,
        )
    except Exception:
        pass

    prefix, suffix = "", ""

    if isinstance(func, partial) and hasattr(func.func, "__name__"):
        prefix, suffix = "partial()"
        func = func.func
    else:
        # The _partialmethod attribute of methods wrapped with partialmethod() was renamed to __partialmethod__ in CPython 3.13:
        # https://github.com/python/cpython/pull/16600
        partial_method = getattr(func, "_partialmethod", None) or getattr(
            func, "__partialmethod__", None
        )
        if isinstance(partial_method, partialmethod):
            prefix, suffix = "partialmethod()"
            func = partial_method.func

    if hasattr(func, "__qualname__"):
        func_qualname = func.__qualname__
    elif hasattr(func, "__name__"):  # Python 2.7 has no __qualname__
        func_qualname = func.__name__

    # Python 3: methods, functions, classes
    if func_qualname is not None:
        if hasattr(func, "__module__"):
            func_qualname = func.__module__ + "." + func_qualname
        func_qualname = prefix + func_qualname + suffix

    return func_qualname
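
# Illustrative examples (assuming a module named `mymodule`):
#
#     qualname_from_function(mymodule.handler)           -> "mymodule.handler"
#     qualname_from_function(partial(mymodule.handler))  -> "partial(<function mymodule.handler>)"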


def transaction_from_function(func):
    # type: (Callable[..., Any]) -> Optional[str]
    return qualname_from_function(func)


disable_capture_event = ContextVar("disable_capture_event")


class ServerlessTimeoutWarning(Exception):  # noqa: N818
    """Raised when a serverless method is about to reach its timeout."""

    pass


class TimeoutThread(threading.Thread):
    """Creates a Thread which runs (sleeps) for a time duration equal to
    waiting_time and raises a custom ServerlessTimeout exception.
    """

    def __init__(self, waiting_time, configured_timeout):
        # type: (float, int) -> None
        threading.Thread.__init__(self)
        self.waiting_time = waiting_time
        self.configured_timeout = configured_timeout
        self._stop_event = threading.Event()

    def stop(self):
        # type: () -> None
        self._stop_event.set()

    def run(self):
        # type: () -> None

        self._stop_event.wait(self.waiting_time)

        if self._stop_event.is_set():
            return

        integer_configured_timeout = int(self.configured_timeout)

        # Round the configured timeout (in seconds) up to the next whole second
        if integer_configured_timeout < self.configured_timeout:
            integer_configured_timeout = integer_configured_timeout + 1

        # Raising Exception after timeout duration is reached
        raise ServerlessTimeoutWarning(
            "WARNING : Function is expected to get timed out. Configured timeout duration = {} seconds.".format(
                integer_configured_timeout
            )
        )


def to_base64(original):
    # type: (str) -> Optional[str]
    """
    Convert a string to base64, via UTF-8. Returns None on invalid input.
    """
    base64_string = None

    try:
        utf8_bytes = original.encode("UTF-8")
        base64_bytes = base64.b64encode(utf8_bytes)
        base64_string = base64_bytes.decode("UTF-8")
    except Exception as err:
        logger.warning("Unable to encode {orig} to base64:".format(orig=original), err)

    return base64_string


def from_base64(base64_string):
    # type: (str) -> Optional[str]
    """
    Convert a string from base64, via UTF-8. Returns None on invalid input.
    """
    utf8_string = None

    try:
        only_valid_chars = BASE64_ALPHABET.match(base64_string)
        assert only_valid_chars

        base64_bytes = base64_string.encode("UTF-8")
        utf8_bytes = base64.b64decode(base64_bytes)
        utf8_string = utf8_bytes.decode("UTF-8")
    except Exception as err:
        logger.warning("Unable to decode %s from base64: %s", base64_string, err)

    return utf8_string
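
# Example round trip:
#
#     to_base64("hello")       -> "aGVsbG8="
#     from_base64("aGVsbG8=")  -> "hello"
#     from_base64("%%%")       -> None  (invalid input is logged and swallowed)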


Components = namedtuple("Components", ["scheme", "netloc", "path", "query", "fragment"])


def sanitize_url(url, remove_authority=True, remove_query_values=True, split=False):
    # type: (str, bool, bool, bool) -> Union[str, Components]
    """
    Removes the authority and query parameter values from a given URL.
    """
    parsed_url = urlsplit(url)
    query_params = parse_qs(parsed_url.query, keep_blank_values=True)

    # strip username:password (netloc can be usr:pwd@example.com)
    if remove_authority:
        netloc_parts = parsed_url.netloc.split("@")
        if len(netloc_parts) > 1:
            netloc = "%s:%s@%s" % (
                SENSITIVE_DATA_SUBSTITUTE,
                SENSITIVE_DATA_SUBSTITUTE,
                netloc_parts[-1],
            )
        else:
            netloc = parsed_url.netloc
    else:
        netloc = parsed_url.netloc

    # strip values from query string
    if remove_query_values:
        query_string = unquote(
            urlencode({key: SENSITIVE_DATA_SUBSTITUTE for key in query_params})
        )
    else:
        query_string = parsed_url.query

    components = Components(
        scheme=parsed_url.scheme,
        netloc=netloc,
        query=query_string,
        path=parsed_url.path,
        fragment=parsed_url.fragment,
    )

    if split:
        return components
    else:
        return urlunsplit(components)
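
# Illustrative example (hypothetical URL):
#
#     sanitize_url("https://user:pw@example.com/search?q=secret")
#     -> "https://[Filtered]:[Filtered]@example.com/search?q=[Filtered]"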


ParsedUrl = namedtuple("ParsedUrl", ["url", "query", "fragment"])


def parse_url(url, sanitize=True):
    # type: (str, bool) -> ParsedUrl
    """
    Splits a URL into a url (including path), query and fragment. If sanitize is True, the query
    parameters will be sanitized to remove sensitive data. The authority (username and password)
    in the URL will always be removed.
    """
    parsed_url = sanitize_url(
        url, remove_authority=True, remove_query_values=sanitize, split=True
    )

    base_url = urlunsplit(
        Components(
            scheme=parsed_url.scheme,  # type: ignore
            netloc=parsed_url.netloc,  # type: ignore
            query="",
            path=parsed_url.path,  # type: ignore
            fragment="",
        )
    )

    return ParsedUrl(
        url=base_url,
        query=parsed_url.query,  # type: ignore
        fragment=parsed_url.fragment,  # type: ignore
    )
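
# Illustrative example (hypothetical URL):
#
#     parse_url("https://example.com/api/items?id=7#top")
#     -> ParsedUrl(url="https://example.com/api/items", query="id=[Filtered]", fragment="top")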


def is_valid_sample_rate(rate, source):
    # type: (Any, str) -> bool
    """
    Checks the given sample rate to make sure it is a valid type and value (a
    boolean or a number between 0 and 1, inclusive).
    """

    # both booleans and NaN are instances of Real, so a) checking for Real
    # also covers booleans, b) we have to check separately for NaN, and
    # c) Decimal does not derive from Real, so we need to check for it separately too
    if not isinstance(rate, (Real, Decimal)) or math.isnan(rate):
        logger.warning(
            "{source} Given sample rate is invalid. Sample rate must be a boolean or a number between 0 and 1. Got {rate} of type {type}.".format(
                source=source, rate=rate, type=type(rate)
            )
        )
        return False

    # in case rate is a boolean, it will get cast to 1 if it's True and 0 if it's False
    rate = float(rate)
    if rate < 0 or rate > 1:
        logger.warning(
            "{source} Given sample rate is invalid. Sample rate must be between 0 and 1. Got {rate}.".format(
                source=source, rate=rate
            )
        )
        return False

    return True


def match_regex_list(item, regex_list=None, substring_matching=False):
    # type: (str, Optional[List[str]], bool) -> bool
    if regex_list is None:
        return False

    for item_matcher in regex_list:
        if not substring_matching and item_matcher[-1] != "$":
            item_matcher += "$"

        matched = re.search(item_matcher, item)
        if matched:
            return True

    return False
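
# Illustrative examples:
#
#     match_regex_list("GET /users", ["GET .*"])                       -> True
#     match_regex_list("GET /users", ["GET"])                          -> False  ("$" is appended)
#     match_regex_list("GET /users", ["GET"], substring_matching=True) -> True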


def is_sentry_url(client, url):
    # type: (sentry_sdk.client.BaseClient, str) -> bool
    """
    Determines whether the given URL matches the Sentry DSN.
    """
    return (
        client is not None
        and client.transport is not None
        and client.transport.parsed_dsn is not None
        and client.transport.parsed_dsn.netloc in url
    )


def _generate_installed_modules():
    # type: () -> Iterator[Tuple[str, str]]
    try:
        from importlib import metadata

        yielded = set()
        for dist in metadata.distributions():
            name = dist.metadata["Name"]
            # `metadata` values may be `None`, see:
            # https://github.com/python/cpython/issues/91216
            # and
            # https://github.com/python/importlib_metadata/issues/371
            if name is not None:
                normalized_name = _normalize_module_name(name)
                if dist.version is not None and normalized_name not in yielded:
                    yield normalized_name, dist.version
                    yielded.add(normalized_name)

    except ImportError:
        # < py3.8
        try:
            import pkg_resources
        except ImportError:
            return

        for info in pkg_resources.working_set:
            yield _normalize_module_name(info.key), info.version


def _normalize_module_name(name):
    # type: (str) -> str
    return name.lower()


def _get_installed_modules():
    # type: () -> Dict[str, str]
    global _installed_modules
    if _installed_modules is None:
        _installed_modules = dict(_generate_installed_modules())
    return _installed_modules


def package_version(package):
    # type: (str) -> Optional[Tuple[int, ...]]
    installed_packages = _get_installed_modules()
    version = installed_packages.get(package)
    if version is None:
        return None

    return parse_version(version)
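
# Illustrative example (the actual value depends on the environment): if
# urllib3 2.2.3 is installed, package_version("urllib3") returns (2, 2, 3);
# for packages that are not installed it returns None.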


def reraise(tp, value, tb=None):
    # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[Any]) -> NoReturn
    assert value is not None
    if value.__traceback__ is not tb:
        raise value.with_traceback(tb)
    raise value


def _no_op(*_a, **_k):
    # type: (*Any, **Any) -> None
    """No-op function for ensure_integration_enabled."""
    pass


if TYPE_CHECKING:

    @overload
    def ensure_integration_enabled(
        integration,  # type: type[sentry_sdk.integrations.Integration]
        original_function,  # type: Callable[P, R]
    ):
        # type: (...) -> Callable[[Callable[P, R]], Callable[P, R]]
        ...

    @overload
    def ensure_integration_enabled(
        integration,  # type: type[sentry_sdk.integrations.Integration]
    ):
        # type: (...) -> Callable[[Callable[P, None]], Callable[P, None]]
        ...


def ensure_integration_enabled(
    integration,  # type: type[sentry_sdk.integrations.Integration]
    original_function=_no_op,  # type: Union[Callable[P, R], Callable[P, None]]
):
    # type: (...) -> Callable[[Callable[P, R]], Callable[P, R]]
    """
    Ensures a given integration is enabled prior to calling a Sentry-patched function.

    The function takes as its parameters the integration that must be enabled and the original
    function that the SDK is patching. The function returns a function that takes the
    decorated (Sentry-patched) function as its parameter, and returns a function that, when
    called, checks whether the given integration is enabled. If the integration is enabled, the
    function calls the decorated, Sentry-patched function. If the integration is not enabled,
    the original function is called.

    The function also takes care of preserving the original function's signature and docstring.

    Example usage:

    ```python
    @ensure_integration_enabled(MyIntegration, my_function)
    def patch_my_function():
        with sentry_sdk.start_transaction(...):
            return my_function()
    ```
    """
    if TYPE_CHECKING:
        # Type hint to ensure the default function has the right typing. The overloads
        # ensure the default _no_op function is only used when R is None.
        original_function = cast(Callable[P, R], original_function)

    def patcher(sentry_patched_function):
        # type: (Callable[P, R]) -> Callable[P, R]
        def runner(*args: "P.args", **kwargs: "P.kwargs"):
            # type: (...) -> R
            if sentry_sdk.get_client().get_integration(integration) is None:
                return original_function(*args, **kwargs)

            return sentry_patched_function(*args, **kwargs)

        if original_function is _no_op:
            return wraps(sentry_patched_function)(runner)

        return wraps(original_function)(runner)

    return patcher


if PY37:

    def nanosecond_time():
        # type: () -> int
        return time.perf_counter_ns()

else:

    def nanosecond_time():
        # type: () -> int
        return int(time.perf_counter() * 1e9)


def now():
    # type: () -> float
    return time.perf_counter()


try:
    from gevent import get_hub as get_gevent_hub
    from gevent.monkey import is_module_patched
except ImportError:

    # it's not great that the signatures are different, get_hub can't return None
    # consider adding an if TYPE_CHECKING to change the signature to Optional[Hub]
    def get_gevent_hub():  # type: ignore[misc]
        # type: () -> Optional[Hub]
        return None

    def is_module_patched(mod_name):
        # type: (str) -> bool
        # unable to import from gevent means no modules have been patched
        return False


def is_gevent():
    # type: () -> bool
    return is_module_patched("threading") or is_module_patched("_thread")


def get_current_thread_meta(thread=None):
    # type: (Optional[threading.Thread]) -> Tuple[Optional[int], Optional[str]]
    """
    Try to get the id of the current thread, with various fallbacks.
    """

    # if a thread is specified, that takes priority
    if thread is not None:
        try:
            thread_id = thread.ident
            thread_name = thread.name
            if thread_id is not None:
                return thread_id, thread_name
        except AttributeError:
            pass

    # if the app is using gevent, we should look at the gevent hub first
    # as the id there differs from what the threading module reports
    if is_gevent():
        gevent_hub = get_gevent_hub()
        if gevent_hub is not None:
            try:
                # this is undocumented, so wrap it in try except to be safe
                return gevent_hub.thread_ident, None
            except AttributeError:
                pass

    # use the current thread's id if possible
    try:
        thread = threading.current_thread()
        thread_id = thread.ident
        thread_name = thread.name
        if thread_id is not None:
            return thread_id, thread_name
    except AttributeError:
        pass

    # if we can't get the current thread id, fall back to the main thread id
    try:
        thread = threading.main_thread()
        thread_id = thread.ident
        thread_name = thread.name
        if thread_id is not None:
            return thread_id, thread_name
    except AttributeError:
        pass

    # we've tried everything, time to give up
    return None, None
sentry-python-2.18.0/sentry_sdk/worker.py000066400000000000000000000105601471214654000205230ustar00rootroot00000000000000
import os
import threading

from time import sleep, time
from sentry_sdk._queue import Queue, FullError
from sentry_sdk.utils import logger
from sentry_sdk.consts import DEFAULT_QUEUE_SIZE

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Any
    from typing import Optional
    from typing import Callable


_TERMINATOR = object()


class BackgroundWorker:
    def __init__(self, queue_size=DEFAULT_QUEUE_SIZE):
        # type: (int) -> None
        self._queue = Queue(queue_size)  # type: Queue
        self._lock = threading.Lock()
        self._thread = None  # type: Optional[threading.Thread]
        self._thread_for_pid = None  # type: Optional[int]

    @property
    def is_alive(self):
        # type: () -> bool
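        # A worker started before a fork is unusable in the child process;
        # the PID check makes the child start its own thread instead.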
        if self._thread_for_pid != os.getpid():
            return False
        if not self._thread:
            return False
        return self._thread.is_alive()

    def _ensure_thread(self):
        # type: () -> None
        if not self.is_alive:
            self.start()

    def _timed_queue_join(self, timeout):
        # type: (float) -> bool
        deadline = time() + timeout
        queue = self._queue
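
        # Block on the queue's all_tasks_done condition, re-checking the
        # remaining delay after each wakeup; give up once the deadline passes.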

        queue.all_tasks_done.acquire()

        try:
            while queue.unfinished_tasks:
                delay = deadline - time()
                if delay <= 0:
                    return False
                queue.all_tasks_done.wait(timeout=delay)

            return True
        finally:
            queue.all_tasks_done.release()

    def start(self):
        # type: () -> None
        with self._lock:
            if not self.is_alive:
                self._thread = threading.Thread(
                    target=self._target, name="sentry-sdk.BackgroundWorker"
                )
                self._thread.daemon = True
                try:
                    self._thread.start()
                    self._thread_for_pid = os.getpid()
                except RuntimeError:
                    # At this point we can no longer start because the interpreter
                    # is already shutting down.  Sadly at this point we can no longer
                    # send out events.
                    self._thread = None

    def kill(self):
        # type: () -> None
        """
        Kill worker thread. Returns immediately. Not useful for
        waiting on shutdown for events, use `flush` for that.
        """
        logger.debug("background worker got kill request")
        with self._lock:
            if self._thread:
                try:
                    self._queue.put_nowait(_TERMINATOR)
                except FullError:
                    logger.debug("background worker queue full, kill failed")

                self._thread = None
                self._thread_for_pid = None

    def flush(self, timeout, callback=None):
        # type: (float, Optional[Any]) -> None
        logger.debug("background worker got flush request")
        with self._lock:
            if self.is_alive and timeout > 0.0:
                self._wait_flush(timeout, callback)
        logger.debug("background worker flushed")

    def full(self):
        # type: () -> bool
        return self._queue.full()

    def _wait_flush(self, timeout, callback):
        # type: (float, Optional[Any]) -> None
        initial_timeout = min(0.1, timeout)
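        # Do a short initial join so the callback can fire early with the
        # number of pending events, then wait out the rest of the timeout.
        # qsize() does not count the task currently being processed, hence
        # the +1 when reporting pending events below.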
        if not self._timed_queue_join(initial_timeout):
            pending = self._queue.qsize() + 1
            logger.debug("%d event(s) pending on flush", pending)
            if callback is not None:
                callback(pending, timeout)

            if not self._timed_queue_join(timeout - initial_timeout):
                pending = self._queue.qsize() + 1
                logger.error("flush timed out, dropped %s events", pending)

    def submit(self, callback):
        # type: (Callable[[], None]) -> bool
        self._ensure_thread()
        try:
            self._queue.put_nowait(callback)
            return True
        except FullError:
            return False

    def _target(self):
        # type: () -> None
        while True:
            callback = self._queue.get()
            try:
                if callback is _TERMINATOR:
                    break
                try:
                    callback()
                except Exception:
                    logger.error("Failed processing job", exc_info=True)
            finally:
                self._queue.task_done()
            sleep(0)
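
# Usage sketch (illustrative, not executed here):
#
#     >>> worker = BackgroundWorker()
#     >>> worker.submit(lambda: None)  # lazily starts the worker thread
#     True
#     >>> worker.flush(timeout=2.0)
#     >>> worker.kill()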

# ---- sentry-python-2.18.0/setup.py ----
#!/usr/bin/env python

"""
Sentry-Python - Sentry SDK for Python
=====================================

**Sentry-Python is an SDK for Sentry.** Check out `GitHub
<https://github.com/getsentry/sentry-python>`_ to find out more.
"""

import os
from setuptools import setup, find_packages

here = os.path.abspath(os.path.dirname(__file__))


def get_file_text(file_name):
    with open(os.path.join(here, file_name)) as in_file:
        return in_file.read()


setup(
    name="sentry-sdk",
    version="2.18.0",
    author="Sentry Team and Contributors",
    author_email="hello@sentry.io",
    url="https://github.com/getsentry/sentry-python",
    project_urls={
        "Documentation": "https://docs.sentry.io/platforms/python/",
        "Changelog": "https://github.com/getsentry/sentry-python/blob/master/CHANGELOG.md",
    },
    description="Python client for Sentry (https://sentry.io)",
    long_description=get_file_text("README.md"),
    long_description_content_type="text/markdown",
    packages=find_packages(exclude=("tests", "tests.*")),
    # PEP 561
    package_data={"sentry_sdk": ["py.typed"]},
    zip_safe=False,
    license="MIT",
    python_requires=">=3.6",
    install_requires=[
        "urllib3>=1.26.11",
        "certifi",
    ],
    extras_require={
        "aiohttp": ["aiohttp>=3.5"],
        "anthropic": ["anthropic>=0.16"],
        "arq": ["arq>=0.23"],
        "asyncpg": ["asyncpg>=0.23"],
        "beam": ["apache-beam>=2.12"],
        "bottle": ["bottle>=0.12.13"],
        "celery": ["celery>=3"],
        "celery-redbeat": ["celery-redbeat>=2"],
        "chalice": ["chalice>=1.16.0"],
        "clickhouse-driver": ["clickhouse-driver>=0.2.0"],
        "django": ["django>=1.8"],
        "falcon": ["falcon>=1.4"],
        "fastapi": ["fastapi>=0.79.0"],
        "flask": ["flask>=0.11", "blinker>=1.1", "markupsafe"],
        "grpcio": ["grpcio>=1.21.1", "protobuf>=3.8.0"],
        "http2": ["httpcore[http2]==1.*"],
        "httpx": ["httpx>=0.16.0"],
        "huey": ["huey>=2"],
        "huggingface_hub": ["huggingface_hub>=0.22"],
        "langchain": ["langchain>=0.0.210"],
        "launchdarkly": ["launchdarkly-server-sdk>=9.8.0"],
        "litestar": ["litestar>=2.0.0"],
        "loguru": ["loguru>=0.5"],
        "openai": ["openai>=1.0.0", "tiktoken>=0.3.0"],
        "openfeature": ["openfeature-sdk>=0.7.1"],
        "opentelemetry": ["opentelemetry-distro>=0.35b0"],
        "opentelemetry-experimental": ["opentelemetry-distro"],
        "pure_eval": ["pure_eval", "executing", "asttokens"],
        "pymongo": ["pymongo>=3.1"],
        "pyspark": ["pyspark>=2.4.4"],
        "quart": ["quart>=0.16.1", "blinker>=1.1"],
        "rq": ["rq>=0.6"],
        "sanic": ["sanic>=0.8"],
        "sqlalchemy": ["sqlalchemy>=1.2"],
        "starlette": ["starlette>=0.19.1"],
        "starlite": ["starlite>=1.48"],
        "tornado": ["tornado>=6"],
    },
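    # Each extra above is installable alongside the SDK, e.g.:
    #     pip install "sentry-sdk[flask]"
    #     pip install "sentry-sdk[celery,django]"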
    entry_points={
        "opentelemetry_propagator": [
            "sentry=sentry_sdk.integrations.opentelemetry:SentryPropagator"
        ]
    },
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Environment :: Web Environment",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: BSD License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "Programming Language :: Python :: 3.12",
        "Programming Language :: Python :: 3.13",
        "Topic :: Software Development :: Libraries :: Python Modules",
    ],
    options={"bdist_wheel": {"universal": "1"}},
)

# ---- sentry-python-2.18.0/tests/__init__.py ----
import sys
import warnings

# This is used in _capture_internal_warnings. We need to run this at import
# time because that's where many deprecation warnings might get thrown.
#
# This lives in tests/__init__.py because apparently even tests/conftest.py
# gets loaded too late.
assert "sentry_sdk" not in sys.modules

_warning_recorder_mgr = warnings.catch_warnings(record=True)
_warning_recorder = _warning_recorder_mgr.__enter__()

# ---- sentry-python-2.18.0/tests/conftest.py ----
import json
import os
import socket
import warnings
from threading import Thread
from contextlib import contextmanager
from http.server import BaseHTTPRequestHandler, HTTPServer
from unittest import mock

import pytest
import jsonschema

try:
    import gevent
except ImportError:
    gevent = None

try:
    import eventlet
except ImportError:
    eventlet = None

import sentry_sdk
import sentry_sdk.utils
from sentry_sdk.envelope import Envelope
from sentry_sdk.integrations import (  # noqa: F401
    _DEFAULT_INTEGRATIONS,
    _installed_integrations,
    _processed_integrations,
)
from sentry_sdk.profiler import teardown_profiler
from sentry_sdk.profiler.continuous_profiler import teardown_continuous_profiler
from sentry_sdk.transport import Transport
from sentry_sdk.utils import reraise

from tests import _warning_recorder, _warning_recorder_mgr

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Optional
    from collections.abc import Iterator


SENTRY_EVENT_SCHEMA = "./checkouts/data-schemas/relay/event.schema.json"

if not os.path.isfile(SENTRY_EVENT_SCHEMA):
    SENTRY_EVENT_SCHEMA = None
else:
    with open(SENTRY_EVENT_SCHEMA) as f:
        SENTRY_EVENT_SCHEMA = json.load(f)

try:
    import pytest_benchmark
except ImportError:

    @pytest.fixture
    def benchmark():
        return lambda x: x()

else:
    del pytest_benchmark


from sentry_sdk import scope


@pytest.fixture(autouse=True)
def clean_scopes():
    """
    Resets the scopes for every test to avoid leaking data between tests.
    """
    scope._global_scope = None
    scope._isolation_scope.set(None)
    scope._current_scope.set(None)


@pytest.fixture(autouse=True)
def internal_exceptions(request):
    errors = []
    if "tests_internal_exceptions" in request.keywords:
        return

    def _capture_internal_exception(exc_info):
        errors.append(exc_info)

    @request.addfinalizer
    def _():
        # reraise the errors so that this just acts as a pass-through (that
        # happens to keep track of the errors which pass through it)
        for e in errors:
            reraise(*e)

    sentry_sdk.utils.capture_internal_exception = _capture_internal_exception

    return errors


@pytest.fixture(autouse=True, scope="session")
def _capture_internal_warnings():
    yield

    _warning_recorder_mgr.__exit__(None, None, None)
    recorder = _warning_recorder

    for warning in recorder:
        try:
            if isinstance(warning.message, ResourceWarning):
                continue
        except NameError:
            pass

        if "sentry_sdk" not in str(warning.filename) and "sentry-sdk" not in str(
            warning.filename
        ):
            continue

        # pytest-django
        if "getfuncargvalue" in str(warning.message):
            continue

        # Happens when re-initializing the SDK
        if "but it was only enabled on init()" in str(warning.message):
            continue

        # sanic's usage of aiohttp for test client
        if "verify_ssl is deprecated, use ssl=False instead" in str(warning.message):
            continue

        if "getargspec" in str(warning.message) and warning.filename.endswith(
            ("pyramid/config/util.py", "pyramid/config/views.py")
        ):
            continue

        if "isAlive() is deprecated" in str(
            warning.message
        ) and warning.filename.endswith("celery/utils/timer2.py"):
            continue

        if "collections.abc" in str(warning.message) and warning.filename.endswith(
            ("celery/canvas.py", "werkzeug/datastructures.py", "tornado/httputil.py")
        ):
            continue

        # Django 1.7 emits a (seemingly) false-positive warning for our test
        # app and suggests to use a middleware that does not exist in later
        # Django versions.
        if "SessionAuthenticationMiddleware" in str(warning.message):
            continue

        if "Something has already installed a non-asyncio" in str(warning.message):
            continue

        if "dns.hash" in str(warning.message) or "dns/namedict" in warning.filename:
            continue

        raise AssertionError(warning)


@pytest.fixture
def validate_event_schema(tmpdir):
    def inner(event):
        if SENTRY_EVENT_SCHEMA:
            jsonschema.validate(instance=event, schema=SENTRY_EVENT_SCHEMA)

    return inner


@pytest.fixture
def reset_integrations():
    """
    Use with caution, sometimes we really need to start
    with a clean slate to ensure monkeypatching works well,
    but this also means some other stuff will be monkeypatched twice.
    """
    global _DEFAULT_INTEGRATIONS, _processed_integrations
    try:
        _DEFAULT_INTEGRATIONS.remove(
            "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration"
        )
    except ValueError:
        pass
    _processed_integrations.clear()
    _installed_integrations.clear()


@pytest.fixture
def sentry_init(request):
    def inner(*a, **kw):
        kw.setdefault("transport", TestTransport())
        client = sentry_sdk.Client(*a, **kw)
        sentry_sdk.get_global_scope().set_client(client)

    if request.node.get_closest_marker("forked"):
        # Do not run isolation if the test is already running in
        # ultimate isolation (seems to be required for celery tests that
        # fork)
        yield inner
    else:
        old_client = sentry_sdk.get_global_scope().client
        try:
            sentry_sdk.get_current_scope().set_client(None)
            yield inner
        finally:
            sentry_sdk.get_global_scope().set_client(old_client)


class TestTransport(Transport):
    def __init__(self):
        Transport.__init__(self)

    def capture_envelope(self, _: Envelope) -> None:
        """No-op capture_envelope for tests"""
        pass


@pytest.fixture
def capture_events(monkeypatch):
    def inner():
        events = []
        test_client = sentry_sdk.get_client()
        old_capture_envelope = test_client.transport.capture_envelope

        def append_event(envelope):
            for item in envelope:
                if item.headers.get("type") in ("event", "transaction"):
                    events.append(item.payload.json)
            return old_capture_envelope(envelope)

        monkeypatch.setattr(test_client.transport, "capture_envelope", append_event)

        return events

    return inner


@pytest.fixture
def capture_envelopes(monkeypatch):
    def inner():
        envelopes = []
        test_client = sentry_sdk.get_client()
        old_capture_envelope = test_client.transport.capture_envelope

        def append_envelope(envelope):
            envelopes.append(envelope)
            return old_capture_envelope(envelope)

        monkeypatch.setattr(test_client.transport, "capture_envelope", append_envelope)

        return envelopes

    return inner


@pytest.fixture
def capture_record_lost_event_calls(monkeypatch):
    def inner():
        calls = []
        test_client = sentry_sdk.get_client()

        def record_lost_event(reason, data_category=None, item=None, *, quantity=1):
            calls.append((reason, data_category, item, quantity))

        monkeypatch.setattr(
            test_client.transport, "record_lost_event", record_lost_event
        )
        return calls

    return inner


@pytest.fixture
def capture_events_forksafe(monkeypatch, capture_events, request):
    def inner():
        capture_events()

        events_r, events_w = os.pipe()
        events_r = os.fdopen(events_r, "rb", 0)
        events_w = os.fdopen(events_w, "wb", 0)

        test_client = sentry_sdk.get_client()

        old_capture_envelope = test_client.transport.capture_envelope

        def append(envelope):
            event = envelope.get_event() or envelope.get_transaction_event()
            if event is not None:
                events_w.write(json.dumps(event).encode("utf-8"))
                events_w.write(b"\n")
            return old_capture_envelope(envelope)

        def flush(timeout=None, callback=None):
            events_w.write(b"flush\n")

        monkeypatch.setattr(test_client.transport, "capture_envelope", append)
        monkeypatch.setattr(test_client, "flush", flush)

        return EventStreamReader(events_r, events_w)

    return inner


class EventStreamReader:
    def __init__(self, read_file, write_file):
        self.read_file = read_file
        self.write_file = write_file

    def read_event(self):
        return json.loads(self.read_file.readline().decode("utf-8"))

    def read_flush(self):
        assert self.read_file.readline() == b"flush\n"


# scope=session ensures that fixture is run earlier
@pytest.fixture(
    scope="session",
    params=[None, "eventlet", "gevent"],
    ids=("threads", "eventlet", "greenlet"),
)
def maybe_monkeypatched_threading(request):
    if request.param == "eventlet":
        if eventlet is None:
            pytest.skip("no eventlet installed")

        try:
            eventlet.monkey_patch()
        except AttributeError as e:
            if "'thread.RLock' object has no attribute" in str(e):
                # https://bitbucket.org/pypy/pypy/issues/2962/gevent-cannot-patch-rlock-under-pypy-27-7
                pytest.skip("https://github.com/eventlet/eventlet/issues/546")
            else:
                raise
    elif request.param == "gevent":
        if gevent is None:
            pytest.skip("no gevent installed")
        try:
            gevent.monkey.patch_all()
        except Exception as e:
            if "_RLock__owner" in str(e):
                pytest.skip("https://github.com/gevent/gevent/issues/1380")
            else:
                raise
    else:
        assert request.param is None

    return request.param


@pytest.fixture
def render_span_tree():
    def inner(event):
        assert event["type"] == "transaction"

        by_parent = {}
        for span in event["spans"]:
            by_parent.setdefault(span["parent_span_id"], []).append(span)

        def render_span(span):
            yield "- op={}: description={}".format(
                json.dumps(span.get("op")), json.dumps(span.get("description"))
            )
            for subspan in by_parent.get(span["span_id"]) or ():
                for line in render_span(subspan):
                    yield "  {}".format(line)

        root_span = event["contexts"]["trace"]

        # Render the span tree as a single multiline string so tests can compare it directly
        return "\n".join(render_span(root_span))

    return inner


@pytest.fixture(name="StringContaining")
def string_containing_matcher():
    """
    An object which matches any string containing the substring passed to the
    object at instantiation time.

    Useful for assert_called_with, assert_any_call, etc.

    Used like this:

    >>> f = mock.Mock()
    >>> f("dogs are great")
    >>> f.assert_any_call("dogs") # will raise AssertionError
    Traceback (most recent call last):
        ...
    AssertionError: mock('dogs') call not found
    >>> f.assert_any_call(StringContaining("dogs")) # no AssertionError

    """

    class StringContaining:
        def __init__(self, substring):
            self.substring = substring
            self.valid_types = (str, bytes)

        def __eq__(self, test_string):
            if not isinstance(test_string, self.valid_types):
                return False

            # this is safe even in py2 because as of 2.6, `bytes` exists in py2
            # as an alias for `str`
            if isinstance(test_string, bytes):
                test_string = test_string.decode()

            if len(self.substring) > len(test_string):
                return False

            return self.substring in test_string

        def __ne__(self, test_string):
            return not self.__eq__(test_string)

    return StringContaining


def _safe_is_equal(x, y):
    """
    Compares two values, preferring to use the first's __eq__ method if it
    exists and is implemented.

    Accounts for py2/py3 differences (like ints in py2 not having a __eq__
    method), as well as the incomparability of certain types exposed by using
    raw __eq__ () rather than ==.
    """

    # Prefer using __eq__ directly to ensure that examples like
    #
    #   maisey = Dog()
    #   maisey.name = "Maisey the Dog"
    #   maisey == ObjectDescribedBy(attrs={"name": StringContaining("Maisey")})
    #
    # evaluate to True (in other words, examples where the values in self.attrs
    # might also have custom __eq__ methods; this makes sure those methods get
    # used if possible)
    try:
        is_equal = x.__eq__(y)
    except AttributeError:
        is_equal = NotImplemented

    # this can happen on its own, too (i.e. without an AttributeError being
    # thrown), which is why this is separate from the except block above
    if is_equal == NotImplemented:
        # using == smoothes out weird variations exposed by raw __eq__
        return x == y

    return is_equal


@pytest.fixture(name="DictionaryContaining")
def dictionary_containing_matcher():
    """
    An object which matches any dictionary containing all key-value pairs from
    the dictionary passed to the object at instantiation time.

    Useful for assert_called_with, assert_any_call, etc.

    Used like this:

    >>> f = mock.Mock()
    >>> f({"dogs": "yes", "cats": "maybe"})
    >>> f.assert_any_call({"dogs": "yes"}) # will raise AssertionError
    Traceback (most recent call last):
        ...
    AssertionError: mock({'dogs': 'yes'}) call not found
    >>> f.assert_any_call(DictionaryContaining({"dogs": "yes"})) # no AssertionError
    """

    class DictionaryContaining:
        def __init__(self, subdict):
            self.subdict = subdict

        def __eq__(self, test_dict):
            if not isinstance(test_dict, dict):
                return False

            if len(self.subdict) > len(test_dict):
                return False

            for key, value in self.subdict.items():
                try:
                    test_value = test_dict[key]
                except KeyError:  # missing key
                    return False

                if not _safe_is_equal(value, test_value):
                    return False

            return True

        def __ne__(self, test_dict):
            return not self.__eq__(test_dict)

    return DictionaryContaining


@pytest.fixture(name="ObjectDescribedBy")
def object_described_by_matcher():
    """
    An object which matches any other object with the given properties.

    Available properties currently are "type" (a type object) and "attrs" (a
    dictionary).

    Useful for assert_called_with, assert_any_call, etc.

    Used like this:

    >>> class Dog:
    ...     pass
    ...
    >>> maisey = Dog()
    >>> maisey.name = "Maisey"
    >>> maisey.age = 7
    >>> f = mock.Mock()
    >>> f(maisey)
    >>> f.assert_any_call(ObjectDescribedBy(type=Dog)) # no AssertionError
    >>> f.assert_any_call(ObjectDescribedBy(attrs={"name": "Maisey"})) # no AssertionError
    """

    class ObjectDescribedBy:
        def __init__(self, type=None, attrs=None):
            self.type = type
            self.attrs = attrs

        def __eq__(self, test_obj):
            if self.type:
                if not isinstance(test_obj, self.type):
                    return False

            if self.attrs:
                for attr_name, attr_value in self.attrs.items():
                    try:
                        test_value = getattr(test_obj, attr_name)
                    except AttributeError:  # missing attribute
                        return False

                    if not _safe_is_equal(attr_value, test_value):
                        return False

            return True

        def __ne__(self, test_obj):
            return not self.__eq__(test_obj)

    return ObjectDescribedBy


@pytest.fixture
def teardown_profiling():
    # Make sure that a previous test didn't leave the profiler running
    teardown_profiler()
    teardown_continuous_profiler()

    yield

    # Make sure that to shut down the profiler after the test
    teardown_profiler()
    teardown_continuous_profiler()


@pytest.fixture()
def suppress_deprecation_warnings():
    """
    Use this fixture to suppress deprecation warnings in a test.
    Useful for testing deprecated SDK features.
    """
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", DeprecationWarning)
        yield


class MockServerRequestHandler(BaseHTTPRequestHandler):
    def do_GET(self):  # noqa: N802
        # Process an HTTP GET request and return a response with an HTTP 200 status.
        self.send_response(200)
        self.end_headers()
        return


def get_free_port():
    s = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM)
    s.bind(("localhost", 0))
    _, port = s.getsockname()
    s.close()
    return port


def create_mock_http_server():
    # Start a mock server to test outgoing http requests
    mock_server_port = get_free_port()
    mock_server = HTTPServer(("localhost", mock_server_port), MockServerRequestHandler)
    mock_server_thread = Thread(target=mock_server.serve_forever)
    mock_server_thread.daemon = True
    mock_server_thread.start()

    return mock_server_port
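
# Illustrative usage (sketch, using urllib from the standard library):
#
#     from urllib.request import urlopen
#     port = create_mock_http_server()
#     urlopen(f"http://localhost:{port}/")  # the mock server replies with 200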


def unpack_werkzeug_response(response):
    # werkzeug < 2.1 returns a tuple as client response, newer versions return
    # an object
    try:
        return response.get_data(), response.status, response.headers
    except AttributeError:
        content, status, headers = response
        return b"".join(content), status, headers


def werkzeug_set_cookie(client, servername, key, value):
    # client.set_cookie has a different signature in different werkzeug versions
    try:
        client.set_cookie(servername, key, value)
    except TypeError:
        client.set_cookie(key, value)


@contextmanager
def patch_start_tracing_child(fake_transaction_is_none=False):
    # type: (bool) -> Iterator[Optional[mock.MagicMock]]
    if not fake_transaction_is_none:
        fake_transaction = mock.MagicMock()
        fake_start_child = mock.MagicMock()
        fake_transaction.start_child = fake_start_child
    else:
        fake_transaction = None
        fake_start_child = None

    with mock.patch(
        "sentry_sdk.tracing_utils.get_current_span", return_value=fake_transaction
    ):
        yield fake_start_child
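
# Illustrative usage (sketch; code_under_test is a hypothetical function):
#
#     with patch_start_tracing_child() as fake_start_child:
#         code_under_test()
#         fake_start_child.assert_called_once()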


class ApproxDict(dict):
    def __eq__(self, other):
        # For an ApproxDict to equal another dict, the other dict just needs to contain
        # all the keys from the ApproxDict with the same values.
        #
        # The other dict may contain additional keys with any value.
        return all(key in other and other[key] == value for key, value in self.items())

    def __ne__(self, other):
        return not self.__eq__(other)
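
# Illustrative doctest-style examples:
#
#     >>> ApproxDict({"a": 1}) == {"a": 1, "b": 2}
#     True
#     >>> ApproxDict({"a": 1}) == {"b": 2}
#     False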

# ---- sentry-python-2.18.0/tests/integrations/__init__.py ---- (empty)

# ---- sentry-python-2.18.0/tests/integrations/aiohttp/__init__.py ----
import pytest

pytest.importorskip("aiohttp")

# ---- sentry-python-2.18.0/tests/integrations/aiohttp/test_aiohttp.py ----
import asyncio
import json
from contextlib import suppress
from unittest import mock

import pytest
from aiohttp import web, ClientSession
from aiohttp.client import ServerDisconnectedError
from aiohttp.web_request import Request
from aiohttp.web_exceptions import (
    HTTPInternalServerError,
    HTTPNetworkAuthenticationRequired,
    HTTPBadRequest,
    HTTPNotFound,
    HTTPUnavailableForLegalReasons,
)

from sentry_sdk import capture_message, start_transaction
from sentry_sdk.integrations.aiohttp import AioHttpIntegration
from tests.conftest import ApproxDict


@pytest.mark.asyncio
async def test_basic(sentry_init, aiohttp_client, capture_events):
    sentry_init(integrations=[AioHttpIntegration()])

    async def hello(request):
        1 / 0

    app = web.Application()
    app.router.add_get("/", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 500

    (event,) = events

    assert (
        event["transaction"]
        == "tests.integrations.aiohttp.test_aiohttp.test_basic..hello"
    )

    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    request = event["request"]
    host = request["headers"]["Host"]

    assert request["env"] == {"REMOTE_ADDR": "127.0.0.1"}
    assert request["method"] == "GET"
    assert request["query_string"] == ""
    assert request.get("data") is None
    assert request["url"] == "http://{host}/".format(host=host)
    assert request["headers"] == {
        "Accept": "*/*",
        "Accept-Encoding": mock.ANY,
        "Host": host,
        "User-Agent": request["headers"]["User-Agent"],
        "baggage": mock.ANY,
        "sentry-trace": mock.ANY,
    }


@pytest.mark.asyncio
async def test_post_body_not_read(sentry_init, aiohttp_client, capture_events):
    from sentry_sdk.integrations.aiohttp import BODY_NOT_READ_MESSAGE

    sentry_init(integrations=[AioHttpIntegration()])

    body = {"some": "value"}

    async def hello(request):
        1 / 0

    app = web.Application()
    app.router.add_post("/", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.post("/", json=body)
    assert resp.status == 500

    (event,) = events
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    request = event["request"]

    assert request["env"] == {"REMOTE_ADDR": "127.0.0.1"}
    assert request["method"] == "POST"
    assert request["data"] == BODY_NOT_READ_MESSAGE


@pytest.mark.asyncio
async def test_post_body_read(sentry_init, aiohttp_client, capture_events):
    sentry_init(integrations=[AioHttpIntegration()])

    body = {"some": "value"}

    async def hello(request):
        await request.json()
        1 / 0

    app = web.Application()
    app.router.add_post("/", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.post("/", json=body)
    assert resp.status == 500

    (event,) = events
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    request = event["request"]

    assert request["env"] == {"REMOTE_ADDR": "127.0.0.1"}
    assert request["method"] == "POST"
    assert request["data"] == json.dumps(body)


@pytest.mark.asyncio
async def test_403_not_captured(sentry_init, aiohttp_client, capture_events):
    sentry_init(integrations=[AioHttpIntegration()])

    async def hello(request):
        raise web.HTTPForbidden()

    app = web.Application()
    app.router.add_get("/", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 403

    assert not events


@pytest.mark.asyncio
async def test_cancelled_error_not_captured(
    sentry_init, aiohttp_client, capture_events
):
    sentry_init(integrations=[AioHttpIntegration()])

    async def hello(request):
        raise asyncio.CancelledError()

    app = web.Application()
    app.router.add_get("/", hello)

    events = capture_events()
    client = await aiohttp_client(app)

    with suppress(ServerDisconnectedError):
        # Intended `aiohttp` interaction: server will disconnect if it
        # encounters `asyncio.CancelledError`
        await client.get("/")

    assert not events


@pytest.mark.asyncio
async def test_half_initialized(sentry_init, aiohttp_client, capture_events):
    sentry_init(integrations=[AioHttpIntegration()])
    sentry_init()

    async def hello(request):
        return web.Response(text="hello")

    app = web.Application()
    app.router.add_get("/", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 200

    assert events == []


@pytest.mark.asyncio
async def test_tracing(sentry_init, aiohttp_client, capture_events):
    sentry_init(integrations=[AioHttpIntegration()], traces_sample_rate=1.0)

    async def hello(request):
        return web.Response(text="hello")

    app = web.Application()
    app.router.add_get("/", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 200

    (event,) = events

    assert event["type"] == "transaction"
    assert (
        event["transaction"]
        == "tests.integrations.aiohttp.test_aiohttp.test_tracing..hello"
    )


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "url,transaction_style,expected_transaction,expected_source",
    [
        (
            "/message",
            "handler_name",
            "tests.integrations.aiohttp.test_aiohttp.test_transaction_style..hello",
            "component",
        ),
        (
            "/message",
            "method_and_path_pattern",
            "GET /{var}",
            "route",
        ),
    ],
)
async def test_transaction_style(
    sentry_init,
    aiohttp_client,
    capture_events,
    url,
    transaction_style,
    expected_transaction,
    expected_source,
):
    sentry_init(
        integrations=[AioHttpIntegration(transaction_style=transaction_style)],
        traces_sample_rate=1.0,
    )

    async def hello(request):
        return web.Response(text="hello")

    app = web.Application()
    app.router.add_get(r"/{var}", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.get(url)
    assert resp.status == 200

    (event,) = events

    assert event["type"] == "transaction"
    assert event["transaction"] == expected_transaction
    assert event["transaction_info"] == {"source": expected_source}


@pytest.mark.tests_internal_exceptions
@pytest.mark.asyncio
async def test_tracing_unparseable_url(sentry_init, aiohttp_client, capture_events):
    sentry_init(integrations=[AioHttpIntegration()], traces_sample_rate=1.0)

    async def hello(request):
        return web.Response(text="hello")

    app = web.Application()
    app.router.add_get("/", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    with mock.patch(
        "sentry_sdk.integrations.aiohttp.parse_url", side_effect=ValueError
    ):
        resp = await client.get("/")

    assert resp.status == 200

    (event,) = events

    assert event["type"] == "transaction"
    assert (
        event["transaction"]
        == "tests.integrations.aiohttp.test_aiohttp.test_tracing_unparseable_url..hello"
    )


@pytest.mark.asyncio
async def test_traces_sampler_gets_request_object_in_sampling_context(
    sentry_init,
    aiohttp_client,
    DictionaryContaining,  # noqa: N803
    ObjectDescribedBy,  # noqa: N803
):
    traces_sampler = mock.Mock()
    sentry_init(
        integrations=[AioHttpIntegration()],
        traces_sampler=traces_sampler,
    )

    async def kangaroo_handler(request):
        return web.Response(text="dogs are great")

    app = web.Application()
    app.router.add_get("/tricks/kangaroo", kangaroo_handler)

    client = await aiohttp_client(app)
    await client.get("/tricks/kangaroo")

    traces_sampler.assert_any_call(
        DictionaryContaining(
            {
                "aiohttp_request": ObjectDescribedBy(
                    type=Request, attrs={"method": "GET", "path": "/tricks/kangaroo"}
                )
            }
        )
    )


@pytest.mark.asyncio
async def test_has_trace_if_performance_enabled(
    sentry_init, aiohttp_client, capture_events
):
    sentry_init(integrations=[AioHttpIntegration()], traces_sample_rate=1.0)

    async def hello(request):
        capture_message("It's a good day to try dividing by 0")
        1 / 0

    app = web.Application()
    app.router.add_get("/", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 500

    msg_event, error_event, transaction_event = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert transaction_event["contexts"]["trace"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert (
        error_event["contexts"]["trace"]["trace_id"]
        == transaction_event["contexts"]["trace"]["trace_id"]
        == msg_event["contexts"]["trace"]["trace_id"]
    )


@pytest.mark.asyncio
async def test_has_trace_if_performance_disabled(
    sentry_init, aiohttp_client, capture_events
):
    sentry_init(integrations=[AioHttpIntegration()])

    async def hello(request):
        capture_message("It's a good day to try dividing by 0")
        1 / 0

    app = web.Application()
    app.router.add_get("/", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 500

    msg_event, error_event = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert (
        error_event["contexts"]["trace"]["trace_id"]
        == msg_event["contexts"]["trace"]["trace_id"]
    )


@pytest.mark.asyncio
async def test_trace_from_headers_if_performance_enabled(
    sentry_init, aiohttp_client, capture_events
):
    sentry_init(integrations=[AioHttpIntegration()], traces_sample_rate=1.0)

    async def hello(request):
        capture_message("It's a good day to try dividing by 0")
        1 / 0

    app = web.Application()
    app.router.add_get("/", hello)

    events = capture_events()

    # The aiohttp_client is instrumented, so it will generate the sentry-trace
    # header and add it to the request. Get the sentry-trace header from the
    # request so we can later compare it with the transaction events.
    client = await aiohttp_client(app)
    with start_transaction():
        # Headers are only added to the span if there is an active transaction
        resp = await client.get("/")

    sentry_trace_header = resp.request_info.headers.get("sentry-trace")
    trace_id = sentry_trace_header.split("-")[0]
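    # sentry-trace headers have the form "<trace_id>-<span_id>-<sampled>",
    # so the first dash-separated field is the trace id.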

    assert resp.status == 500

    # Last item is the custom transaction event wrapping `client.get("/")`
    msg_event, error_event, transaction_event, _ = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert transaction_event["contexts"]["trace"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
    assert transaction_event["contexts"]["trace"]["trace_id"] == trace_id


@pytest.mark.asyncio
async def test_trace_from_headers_if_performance_disabled(
    sentry_init, aiohttp_client, capture_events
):
    sentry_init(integrations=[AioHttpIntegration()])

    async def hello(request):
        capture_message("It's a good day to try dividing by 0")
        1 / 0

    app = web.Application()
    app.router.add_get("/", hello)

    events = capture_events()

    # The aiohttp_client is instrumented, so it will generate the sentry-trace
    # header and add it to the request. Get the sentry-trace header from the
    # request so we can later compare it with the error events.
    client = await aiohttp_client(app)
    resp = await client.get("/")
    sentry_trace_header = resp.request_info.headers.get("sentry-trace")
    trace_id = sentry_trace_header.split("-")[0]

    assert resp.status == 500

    msg_event, error_event = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
    assert error_event["contexts"]["trace"]["trace_id"] == trace_id


@pytest.mark.asyncio
async def test_crumb_capture(
    sentry_init, aiohttp_raw_server, aiohttp_client, loop, capture_events
):
    def before_breadcrumb(crumb, hint):
        crumb["data"]["extra"] = "foo"
        return crumb

    sentry_init(
        integrations=[AioHttpIntegration()], before_breadcrumb=before_breadcrumb
    )

    async def handler(request):
        return web.Response(text="OK")

    raw_server = await aiohttp_raw_server(handler)

    with start_transaction():
        events = capture_events()

        client = await aiohttp_client(raw_server)
        resp = await client.get("/")
        assert resp.status == 200
        capture_message("Testing!")

        (event,) = events

        crumb = event["breadcrumbs"]["values"][0]
        assert crumb["type"] == "http"
        assert crumb["category"] == "httplib"
        assert crumb["data"] == ApproxDict(
            {
                "url": "http://127.0.0.1:{}/".format(raw_server.port),
                "http.fragment": "",
                "http.method": "GET",
                "http.query": "",
                "http.response.status_code": 200,
                "reason": "OK",
                "extra": "foo",
            }
        )


@pytest.mark.asyncio
async def test_outgoing_trace_headers(sentry_init, aiohttp_raw_server, aiohttp_client):
    sentry_init(
        integrations=[AioHttpIntegration()],
        traces_sample_rate=1.0,
    )

    async def handler(request):
        return web.Response(text="OK")

    raw_server = await aiohttp_raw_server(handler)

    with start_transaction(
        name="/interactions/other-dogs/new-dog",
        op="greeting.sniff",
        # make the trace_id differ between transactions
        trace_id="0123456789012345678901234567890",
    ) as transaction:
        client = await aiohttp_client(raw_server)
        resp = await client.get("/")
        request_span = transaction._span_recorder.spans[-1]

        assert resp.request_info.headers[
            "sentry-trace"
        ] == "{trace_id}-{parent_span_id}-{sampled}".format(
            trace_id=transaction.trace_id,
            parent_span_id=request_span.span_id,
            sampled=1,
        )


@pytest.mark.asyncio
async def test_outgoing_trace_headers_append_to_baggage(
    sentry_init, aiohttp_raw_server, aiohttp_client
):
    sentry_init(
        integrations=[AioHttpIntegration()],
        traces_sample_rate=1.0,
        release="d08ebdb9309e1b004c6f52202de58a09c2268e42",
    )

    async def handler(request):
        return web.Response(text="OK")

    raw_server = await aiohttp_raw_server(handler)

    with start_transaction(
        name="/interactions/other-dogs/new-dog",
        op="greeting.sniff",
        trace_id="0123456789012345678901234567890",
    ):
        client = await aiohttp_client(raw_server)
        resp = await client.get("/", headers={"bagGage": "custom=value"})

        assert (
            resp.request_info.headers["baggage"]
            == "custom=value,sentry-trace_id=0123456789012345678901234567890,sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true"
        )


@pytest.mark.asyncio
async def test_span_origin(
    sentry_init,
    aiohttp_client,
    capture_events,
):
    sentry_init(
        integrations=[AioHttpIntegration()],
        traces_sample_rate=1.0,
    )

    async def hello(request):
        async with ClientSession() as session:
            async with session.get("http://example.com"):
                return web.Response(text="hello")

    app = web.Application()
    app.router.add_get(r"/", hello)

    events = capture_events()

    client = await aiohttp_client(app)
    await client.get("/")

    (event,) = events
    assert event["contexts"]["trace"]["origin"] == "auto.http.aiohttp"
    assert event["spans"][0]["origin"] == "auto.http.aiohttp"


@pytest.mark.parametrize(
    ("integration_kwargs", "exception_to_raise", "should_capture"),
    (
        ({}, None, False),
        ({}, HTTPBadRequest, False),
        (
            {},
            HTTPUnavailableForLegalReasons(None),
            False,
        ),  # Highest 4xx status code (451)
        ({}, HTTPInternalServerError, True),
        ({}, HTTPNetworkAuthenticationRequired, True),  # Highest 5xx status code (511)
        ({"failed_request_status_codes": set()}, HTTPInternalServerError, False),
        (
            {"failed_request_status_codes": set()},
            HTTPNetworkAuthenticationRequired,
            False,
        ),
        ({"failed_request_status_codes": {404, *range(500, 600)}}, HTTPNotFound, True),
        (
            {"failed_request_status_codes": {404, *range(500, 600)}},
            HTTPInternalServerError,
            True,
        ),
        (
            {"failed_request_status_codes": {404, *range(500, 600)}},
            HTTPBadRequest,
            False,
        ),
    ),
)
@pytest.mark.asyncio
async def test_failed_request_status_codes(
    sentry_init,
    aiohttp_client,
    capture_events,
    integration_kwargs,
    exception_to_raise,
    should_capture,
):
    sentry_init(integrations=[AioHttpIntegration(**integration_kwargs)])
    events = capture_events()

    async def handle(_):
        if exception_to_raise is not None:
            raise exception_to_raise
        else:
            return web.Response(status=200)

    app = web.Application()
    app.router.add_get("/", handle)

    client = await aiohttp_client(app)
    resp = await client.get("/")

    expected_status = (
        200 if exception_to_raise is None else exception_to_raise.status_code
    )
    assert resp.status == expected_status

    if should_capture:
        (event,) = events
        assert event["exception"]["values"][0]["type"] == exception_to_raise.__name__
    else:
        assert not events


@pytest.mark.asyncio
async def test_failed_request_status_codes_with_returned_status(
    sentry_init, aiohttp_client, capture_events
):
    """
    Returning a web.Response with a failed_request_status_code should not be reported to Sentry.
    """
    sentry_init(integrations=[AioHttpIntegration(failed_request_status_codes={500})])
    events = capture_events()

    async def handle(_):
        return web.Response(status=500)

    app = web.Application()
    app.router.add_get("/", handle)

    client = await aiohttp_client(app)
    resp = await client.get("/")

    assert resp.status == 500
    assert not events


@pytest.mark.asyncio
async def test_failed_request_status_codes_non_http_exception(
    sentry_init, aiohttp_client, capture_events
):
    """
    If an exception, which is not an instance of HTTPException, is raised, it should be captured, even if
    failed_request_status_codes is empty.
    """
    sentry_init(integrations=[AioHttpIntegration(failed_request_status_codes=set())])
    events = capture_events()

    async def handle(_):
        1 / 0

    app = web.Application()
    app.router.add_get("/", handle)

    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 500

    (event,) = events
    assert event["exception"]["values"][0]["type"] == "ZeroDivisionError"
sentry-python-2.18.0/tests/integrations/anthropic/000077500000000000000000000000001471214654000223105ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/anthropic/__init__.py000066400000000000000000000000601471214654000244150ustar00rootroot00000000000000import pytest

pytest.importorskip("anthropic")

# ---- sentry-python-2.18.0/tests/integrations/anthropic/test_anthropic.py ----
from unittest import mock

try:
    from unittest.mock import AsyncMock
except ImportError:
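    # unittest.mock.AsyncMock was added in Python 3.8; provide a minimal
    # stand-in whose calls can be awaited on older runtimes.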

    class AsyncMock(mock.MagicMock):
        async def __call__(self, *args, **kwargs):
            return super(AsyncMock, self).__call__(*args, **kwargs)


import pytest
from anthropic import AsyncAnthropic, Anthropic, AnthropicError, AsyncStream, Stream
from anthropic.types import MessageDeltaUsage, TextDelta, Usage
from anthropic.types.content_block_delta_event import ContentBlockDeltaEvent
from anthropic.types.content_block_start_event import ContentBlockStartEvent
from anthropic.types.content_block_stop_event import ContentBlockStopEvent
from anthropic.types.message import Message
from anthropic.types.message_delta_event import MessageDeltaEvent
from anthropic.types.message_start_event import MessageStartEvent

from sentry_sdk.utils import package_version
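
# The InputJSONDelta type is spelled differently across anthropic releases;
# try both spellings before giving up.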

try:
    from anthropic.types import InputJSONDelta
except ImportError:
    try:
        from anthropic.types import InputJsonDelta as InputJSONDelta
    except ImportError:
        pass

try:
    # 0.27+
    from anthropic.types.raw_message_delta_event import Delta
    from anthropic.types.tool_use_block import ToolUseBlock
except ImportError:
    # pre 0.27
    from anthropic.types.message_delta_event import Delta

try:
    from anthropic.types.text_block import TextBlock
except ImportError:
    from anthropic.types.content_block import ContentBlock as TextBlock

from sentry_sdk import start_transaction
from sentry_sdk.consts import OP, SPANDATA
from sentry_sdk.integrations.anthropic import AnthropicIntegration

ANTHROPIC_VERSION = package_version("anthropic")
EXAMPLE_MESSAGE = Message(
    id="id",
    model="model",
    role="assistant",
    content=[TextBlock(type="text", text="Hi, I'm Claude.")],
    type="message",
    usage=Usage(input_tokens=10, output_tokens=20),
)


async def async_iterator(values):
    for value in values:
        yield value


@pytest.mark.parametrize(
    "send_default_pii, include_prompts",
    [
        (True, True),
        (True, False),
        (False, True),
        (False, False),
    ],
)
def test_nonstreaming_create_message(
    sentry_init, capture_events, send_default_pii, include_prompts
):
    sentry_init(
        integrations=[AnthropicIntegration(include_prompts=include_prompts)],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()
    client = Anthropic(api_key="z")
    client.messages._post = mock.Mock(return_value=EXAMPLE_MESSAGE)

    messages = [
        {
            "role": "user",
            "content": "Hello, Claude",
        }
    ]

    with start_transaction(name="anthropic"):
        response = client.messages.create(
            max_tokens=1024, messages=messages, model="model"
        )

    assert response == EXAMPLE_MESSAGE
    usage = response.usage

    assert usage.input_tokens == 10
    assert usage.output_tokens == 20

    assert len(events) == 1
    (event,) = events

    assert event["type"] == "transaction"
    assert event["transaction"] == "anthropic"

    assert len(event["spans"]) == 1
    (span,) = event["spans"]

    assert span["op"] == OP.ANTHROPIC_MESSAGES_CREATE
    assert span["description"] == "Anthropic messages create"
    assert span["data"][SPANDATA.AI_MODEL_ID] == "model"

    if send_default_pii and include_prompts:
        assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages
        assert span["data"][SPANDATA.AI_RESPONSES] == [
            {"type": "text", "text": "Hi, I'm Claude."}
        ]
    else:
        assert SPANDATA.AI_INPUT_MESSAGES not in span["data"]
        assert SPANDATA.AI_RESPONSES not in span["data"]

    assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 10
    assert span["measurements"]["ai_completion_tokens_used"]["value"] == 20
    assert span["measurements"]["ai_total_tokens_used"]["value"] == 30
    assert span["data"]["ai.streaming"] is False


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "send_default_pii, include_prompts",
    [
        (True, True),
        (True, False),
        (False, True),
        (False, False),
    ],
)
async def test_nonstreaming_create_message_async(
    sentry_init, capture_events, send_default_pii, include_prompts
):
    sentry_init(
        integrations=[AnthropicIntegration(include_prompts=include_prompts)],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()
    client = AsyncAnthropic(api_key="z")
    client.messages._post = AsyncMock(return_value=EXAMPLE_MESSAGE)

    messages = [
        {
            "role": "user",
            "content": "Hello, Claude",
        }
    ]

    with start_transaction(name="anthropic"):
        response = await client.messages.create(
            max_tokens=1024, messages=messages, model="model"
        )

    assert response == EXAMPLE_MESSAGE
    usage = response.usage

    assert usage.input_tokens == 10
    assert usage.output_tokens == 20

    assert len(events) == 1
    (event,) = events

    assert event["type"] == "transaction"
    assert event["transaction"] == "anthropic"

    assert len(event["spans"]) == 1
    (span,) = event["spans"]

    assert span["op"] == OP.ANTHROPIC_MESSAGES_CREATE
    assert span["description"] == "Anthropic messages create"
    assert span["data"][SPANDATA.AI_MODEL_ID] == "model"

    if send_default_pii and include_prompts:
        assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages
        assert span["data"][SPANDATA.AI_RESPONSES] == [
            {"type": "text", "text": "Hi, I'm Claude."}
        ]
    else:
        assert SPANDATA.AI_INPUT_MESSAGES not in span["data"]
        assert SPANDATA.AI_RESPONSES not in span["data"]

    assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 10
    assert span["measurements"]["ai_completion_tokens_used"]["value"] == 20
    assert span["measurements"]["ai_total_tokens_used"]["value"] == 30
    assert span["data"]["ai.streaming"] is False


@pytest.mark.parametrize(
    "send_default_pii, include_prompts",
    [
        (True, True),
        (True, False),
        (False, True),
        (False, False),
    ],
)
def test_streaming_create_message(
    sentry_init, capture_events, send_default_pii, include_prompts
):
    client = Anthropic(api_key="z")
    returned_stream = Stream(cast_to=None, response=None, client=client)
    returned_stream._iterator = [
        MessageStartEvent(
            message=EXAMPLE_MESSAGE,
            type="message_start",
        ),
        ContentBlockStartEvent(
            type="content_block_start",
            index=0,
            content_block=TextBlock(type="text", text=""),
        ),
        ContentBlockDeltaEvent(
            delta=TextDelta(text="Hi", type="text_delta"),
            index=0,
            type="content_block_delta",
        ),
        ContentBlockDeltaEvent(
            delta=TextDelta(text="!", type="text_delta"),
            index=0,
            type="content_block_delta",
        ),
        ContentBlockDeltaEvent(
            delta=TextDelta(text=" I'm Claude!", type="text_delta"),
            index=0,
            type="content_block_delta",
        ),
        ContentBlockStopEvent(type="content_block_stop", index=0),
        MessageDeltaEvent(
            delta=Delta(),
            usage=MessageDeltaUsage(output_tokens=10),
            type="message_delta",
        ),
    ]

    sentry_init(
        integrations=[AnthropicIntegration(include_prompts=include_prompts)],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()
    client.messages._post = mock.Mock(return_value=returned_stream)

    messages = [
        {
            "role": "user",
            "content": "Hello, Claude",
        }
    ]

    with start_transaction(name="anthropic"):
        message = client.messages.create(
            max_tokens=1024, messages=messages, model="model", stream=True
        )

        for _ in message:
            pass

    assert message == returned_stream
    assert len(events) == 1
    (event,) = events

    assert event["type"] == "transaction"
    assert event["transaction"] == "anthropic"

    assert len(event["spans"]) == 1
    (span,) = event["spans"]

    assert span["op"] == OP.ANTHROPIC_MESSAGES_CREATE
    assert span["description"] == "Anthropic messages create"
    assert span["data"][SPANDATA.AI_MODEL_ID] == "model"

    if send_default_pii and include_prompts:
        assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages
        assert span["data"][SPANDATA.AI_RESPONSES] == [
            {"type": "text", "text": "Hi! I'm Claude!"}
        ]

    else:
        assert SPANDATA.AI_INPUT_MESSAGES not in span["data"]
        assert SPANDATA.AI_RESPONSES not in span["data"]
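
    # Token accounting for the stream above: completion = 20 tokens from the
    # message_start usage + 10 from the message_delta usage = 30;
    # total = prompt (10) + completion (30) = 40.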

    assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 10
    assert span["measurements"]["ai_completion_tokens_used"]["value"] == 30
    assert span["measurements"]["ai_total_tokens_used"]["value"] == 40
    assert span["data"]["ai.streaming"] is True


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "send_default_pii, include_prompts",
    [
        (True, True),
        (True, False),
        (False, True),
        (False, False),
    ],
)
async def test_streaming_create_message_async(
    sentry_init, capture_events, send_default_pii, include_prompts
):
    client = AsyncAnthropic(api_key="z")
    returned_stream = AsyncStream(cast_to=None, response=None, client=client)
    returned_stream._iterator = async_iterator(
        [
            MessageStartEvent(
                message=EXAMPLE_MESSAGE,
                type="message_start",
            ),
            ContentBlockStartEvent(
                type="content_block_start",
                index=0,
                content_block=TextBlock(type="text", text=""),
            ),
            ContentBlockDeltaEvent(
                delta=TextDelta(text="Hi", type="text_delta"),
                index=0,
                type="content_block_delta",
            ),
            ContentBlockDeltaEvent(
                delta=TextDelta(text="!", type="text_delta"),
                index=0,
                type="content_block_delta",
            ),
            ContentBlockDeltaEvent(
                delta=TextDelta(text=" I'm Claude!", type="text_delta"),
                index=0,
                type="content_block_delta",
            ),
            ContentBlockStopEvent(type="content_block_stop", index=0),
            MessageDeltaEvent(
                delta=Delta(),
                usage=MessageDeltaUsage(output_tokens=10),
                type="message_delta",
            ),
        ]
    )

    sentry_init(
        integrations=[AnthropicIntegration(include_prompts=include_prompts)],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()
    client.messages._post = AsyncMock(return_value=returned_stream)

    messages = [
        {
            "role": "user",
            "content": "Hello, Claude",
        }
    ]

    with start_transaction(name="anthropic"):
        message = await client.messages.create(
            max_tokens=1024, messages=messages, model="model", stream=True
        )

        async for _ in message:
            pass

    assert message == returned_stream
    assert len(events) == 1
    (event,) = events

    assert event["type"] == "transaction"
    assert event["transaction"] == "anthropic"

    assert len(event["spans"]) == 1
    (span,) = event["spans"]

    assert span["op"] == OP.ANTHROPIC_MESSAGES_CREATE
    assert span["description"] == "Anthropic messages create"
    assert span["data"][SPANDATA.AI_MODEL_ID] == "model"

    if send_default_pii and include_prompts:
        assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages
        assert span["data"][SPANDATA.AI_RESPONSES] == [
            {"type": "text", "text": "Hi! I'm Claude!"}
        ]

    else:
        assert SPANDATA.AI_INPUT_MESSAGES not in span["data"]
        assert SPANDATA.AI_RESPONSES not in span["data"]
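
    # Token accounting for the stream above: completion = 20 tokens from the
    # message_start usage + 10 from the message_delta usage = 30;
    # total = prompt (10) + completion (30) = 40.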

    assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 10
    assert span["measurements"]["ai_completion_tokens_used"]["value"] == 30
    assert span["measurements"]["ai_total_tokens_used"]["value"] == 40
    assert span["data"]["ai.streaming"] is True


@pytest.mark.skipif(
    ANTHROPIC_VERSION < (0, 27),
    reason="Versions <0.27.0 do not include InputJSONDelta, which was introduced in >=0.27.0 along with a new message delta type for tool calling.",
)
@pytest.mark.parametrize(
    "send_default_pii, include_prompts",
    [
        (True, True),
        (True, False),
        (False, True),
        (False, False),
    ],
)
def test_streaming_create_message_with_input_json_delta(
    sentry_init, capture_events, send_default_pii, include_prompts
):
    client = Anthropic(api_key="z")
    returned_stream = Stream(cast_to=None, response=None, client=client)
    returned_stream._iterator = [
        MessageStartEvent(
            message=Message(
                id="msg_0",
                content=[],
                model="claude-3-5-sonnet-20240620",
                role="assistant",
                stop_reason=None,
                stop_sequence=None,
                type="message",
                usage=Usage(input_tokens=366, output_tokens=10),
            ),
            type="message_start",
        ),
        ContentBlockStartEvent(
            type="content_block_start",
            index=0,
            content_block=ToolUseBlock(
                id="toolu_0", input={}, name="get_weather", type="tool_use"
            ),
        ),
        ContentBlockDeltaEvent(
            delta=InputJSONDelta(partial_json="", type="input_json_delta"),
            index=0,
            type="content_block_delta",
        ),
        ContentBlockDeltaEvent(
            delta=InputJSONDelta(partial_json="{'location':", type="input_json_delta"),
            index=0,
            type="content_block_delta",
        ),
        ContentBlockDeltaEvent(
            delta=InputJSONDelta(partial_json=" 'S", type="input_json_delta"),
            index=0,
            type="content_block_delta",
        ),
        ContentBlockDeltaEvent(
            delta=InputJSONDelta(partial_json="an ", type="input_json_delta"),
            index=0,
            type="content_block_delta",
        ),
        ContentBlockDeltaEvent(
            delta=InputJSONDelta(partial_json="Francisco, C", type="input_json_delta"),
            index=0,
            type="content_block_delta",
        ),
        ContentBlockDeltaEvent(
            delta=InputJSONDelta(partial_json="A'}", type="input_json_delta"),
            index=0,
            type="content_block_delta",
        ),
        ContentBlockStopEvent(type="content_block_stop", index=0),
        MessageDeltaEvent(
            delta=Delta(stop_reason="tool_use", stop_sequence=None),
            usage=MessageDeltaUsage(output_tokens=41),
            type="message_delta",
        ),
    ]

    sentry_init(
        integrations=[AnthropicIntegration(include_prompts=include_prompts)],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()
    client.messages._post = mock.Mock(return_value=returned_stream)

    messages = [
        {
            "role": "user",
            "content": "What is the weather like in San Francisco?",
        }
    ]

    with start_transaction(name="anthropic"):
        message = client.messages.create(
            max_tokens=1024, messages=messages, model="model", stream=True
        )

        for _ in message:
            pass

    assert message == returned_stream
    assert len(events) == 1
    (event,) = events

    assert event["type"] == "transaction"
    assert event["transaction"] == "anthropic"

    assert len(event["spans"]) == 1
    (span,) = event["spans"]

    assert span["op"] == OP.ANTHROPIC_MESSAGES_CREATE
    assert span["description"] == "Anthropic messages create"
    assert span["data"][SPANDATA.AI_MODEL_ID] == "model"

    if send_default_pii and include_prompts:
        assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages
        assert span["data"][SPANDATA.AI_RESPONSES] == [
            {"text": "", "type": "text"}
        ]  # we do not record InputJSONDelta because it could contain PII

    else:
        assert SPANDATA.AI_INPUT_MESSAGES not in span["data"]
        assert SPANDATA.AI_RESPONSES not in span["data"]
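
    # Token accounting for the stream above: prompt = 366 (message_start
    # usage); completion = 10 (message_start) + 41 (message_delta) = 51;
    # total = 366 + 51 = 417.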

    assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 366
    assert span["measurements"]["ai_completion_tokens_used"]["value"] == 51
    assert span["measurements"]["ai_total_tokens_used"]["value"] == 417
    assert span["data"]["ai.streaming"] is True


@pytest.mark.asyncio
@pytest.mark.skipif(
    ANTHROPIC_VERSION < (0, 27),
    reason="Versions <0.27.0 do not include InputJSONDelta, which was introduced in >=0.27.0 along with a new message delta type for tool calling.",
)
@pytest.mark.parametrize(
    "send_default_pii, include_prompts",
    [
        (True, True),
        (True, False),
        (False, True),
        (False, False),
    ],
)
async def test_streaming_create_message_with_input_json_delta_async(
    sentry_init, capture_events, send_default_pii, include_prompts
):
    client = AsyncAnthropic(api_key="z")
    returned_stream = AsyncStream(cast_to=None, response=None, client=client)
    returned_stream._iterator = async_iterator(
        [
            MessageStartEvent(
                message=Message(
                    id="msg_0",
                    content=[],
                    model="claude-3-5-sonnet-20240620",
                    role="assistant",
                    stop_reason=None,
                    stop_sequence=None,
                    type="message",
                    usage=Usage(input_tokens=366, output_tokens=10),
                ),
                type="message_start",
            ),
            ContentBlockStartEvent(
                type="content_block_start",
                index=0,
                content_block=ToolUseBlock(
                    id="toolu_0", input={}, name="get_weather", type="tool_use"
                ),
            ),
            ContentBlockDeltaEvent(
                delta=InputJSONDelta(partial_json="", type="input_json_delta"),
                index=0,
                type="content_block_delta",
            ),
            ContentBlockDeltaEvent(
                delta=InputJSONDelta(
                    partial_json="{'location':", type="input_json_delta"
                ),
                index=0,
                type="content_block_delta",
            ),
            ContentBlockDeltaEvent(
                delta=InputJSONDelta(partial_json=" 'S", type="input_json_delta"),
                index=0,
                type="content_block_delta",
            ),
            ContentBlockDeltaEvent(
                delta=InputJSONDelta(partial_json="an ", type="input_json_delta"),
                index=0,
                type="content_block_delta",
            ),
            ContentBlockDeltaEvent(
                delta=InputJSONDelta(
                    partial_json="Francisco, C", type="input_json_delta"
                ),
                index=0,
                type="content_block_delta",
            ),
            ContentBlockDeltaEvent(
                delta=InputJSONDelta(partial_json="A'}", type="input_json_delta"),
                index=0,
                type="content_block_delta",
            ),
            ContentBlockStopEvent(type="content_block_stop", index=0),
            MessageDeltaEvent(
                delta=Delta(stop_reason="tool_use", stop_sequence=None),
                usage=MessageDeltaUsage(output_tokens=41),
                type="message_delta",
            ),
        ]
    )

    sentry_init(
        integrations=[AnthropicIntegration(include_prompts=include_prompts)],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()
    client.messages._post = AsyncMock(return_value=returned_stream)

    messages = [
        {
            "role": "user",
            "content": "What is the weather like in San Francisco?",
        }
    ]

    with start_transaction(name="anthropic"):
        message = await client.messages.create(
            max_tokens=1024, messages=messages, model="model", stream=True
        )

        async for _ in message:
            pass

    assert message == returned_stream
    assert len(events) == 1
    (event,) = events

    assert event["type"] == "transaction"
    assert event["transaction"] == "anthropic"

    assert len(event["spans"]) == 1
    (span,) = event["spans"]

    assert span["op"] == OP.ANTHROPIC_MESSAGES_CREATE
    assert span["description"] == "Anthropic messages create"
    assert span["data"][SPANDATA.AI_MODEL_ID] == "model"

    if send_default_pii and include_prompts:
        assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages
        assert span["data"][SPANDATA.AI_RESPONSES] == [
            {"text": "", "type": "text"}
        ]  # we do not record InputJSONDelta because it could contain PII

    else:
        assert SPANDATA.AI_INPUT_MESSAGES not in span["data"]
        assert SPANDATA.AI_RESPONSES not in span["data"]
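
    # Token accounting for the stream above: prompt = 366 (message_start
    # usage); completion = 10 (message_start) + 41 (message_delta) = 51;
    # total = 366 + 51 = 417.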

    assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 366
    assert span["measurements"]["ai_completion_tokens_used"]["value"] == 51
    assert span["measurements"]["ai_total_tokens_used"]["value"] == 417
    assert span["data"]["ai.streaming"] is True


def test_exception_message_create(sentry_init, capture_events):
    sentry_init(integrations=[AnthropicIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    client = Anthropic(api_key="z")
    client.messages._post = mock.Mock(
        side_effect=AnthropicError("API rate limit reached")
    )
    with pytest.raises(AnthropicError):
        client.messages.create(
            model="some-model",
            messages=[{"role": "system", "content": "I'm throwing an exception"}],
            max_tokens=1024,
        )

    (event,) = events
    assert event["level"] == "error"


@pytest.mark.asyncio
async def test_exception_message_create_async(sentry_init, capture_events):
    sentry_init(integrations=[AnthropicIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    client = AsyncAnthropic(api_key="z")
    client.messages._post = AsyncMock(
        side_effect=AnthropicError("API rate limit reached")
    )
    with pytest.raises(AnthropicError):
        await client.messages.create(
            model="some-model",
            messages=[{"role": "system", "content": "I'm throwing an exception"}],
            max_tokens=1024,
        )

    (event,) = events
    assert event["level"] == "error"


def test_span_origin(sentry_init, capture_events):
    sentry_init(
        integrations=[AnthropicIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client = Anthropic(api_key="z")
    client.messages._post = mock.Mock(return_value=EXAMPLE_MESSAGE)

    messages = [
        {
            "role": "user",
            "content": "Hello, Claude",
        }
    ]

    with start_transaction(name="anthropic"):
        client.messages.create(max_tokens=1024, messages=messages, model="model")

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"
    assert event["spans"][0]["origin"] == "auto.ai.anthropic"


@pytest.mark.asyncio
async def test_span_origin_async(sentry_init, capture_events):
    sentry_init(
        integrations=[AnthropicIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client = AsyncAnthropic(api_key="z")
    client.messages._post = AsyncMock(return_value=EXAMPLE_MESSAGE)

    messages = [
        {
            "role": "user",
            "content": "Hello, Claude",
        }
    ]

    with start_transaction(name="anthropic"):
        await client.messages.create(max_tokens=1024, messages=messages, model="model")

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"
    assert event["spans"][0]["origin"] == "auto.ai.anthropic"
sentry-python-2.18.0/tests/integrations/argv/000077500000000000000000000000001471214654000212605ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/argv/test_argv.py000066400000000000000000000006441471214654000236340ustar00rootroot00000000000000import sys

from sentry_sdk import capture_message
from sentry_sdk.integrations.argv import ArgvIntegration


def test_basic(sentry_init, capture_events, monkeypatch):
    sentry_init(integrations=[ArgvIntegration()])

    argv = ["foo", "bar", "baz"]
    monkeypatch.setattr(sys, "argv", argv)

    events = capture_events()
    capture_message("hi")
    (event,) = events
    assert event["extra"]["sys.argv"] == argv
sentry-python-2.18.0/tests/integrations/ariadne/000077500000000000000000000000001471214654000217245ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/ariadne/__init__.py000066400000000000000000000001521471214654000240330ustar00rootroot00000000000000import pytest

pytest.importorskip("ariadne")
pytest.importorskip("fastapi")
pytest.importorskip("flask")
sentry-python-2.18.0/tests/integrations/ariadne/test_ariadne.py000066400000000000000000000165041471214654000247460ustar00rootroot00000000000000from ariadne import gql, graphql_sync, ObjectType, QueryType, make_executable_schema
from ariadne.asgi import GraphQL
from fastapi import FastAPI
from fastapi.testclient import TestClient
from flask import Flask, request, jsonify

from sentry_sdk.integrations.ariadne import AriadneIntegration
from sentry_sdk.integrations.fastapi import FastApiIntegration
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.starlette import StarletteIntegration


def schema_factory():
    type_defs = gql(
        """
        type Query {
            greeting(name: String): Greeting
            error: String
        }

        type Greeting {
            name: String
        }
    """
    )

    query = QueryType()
    greeting = ObjectType("Greeting")

    @query.field("greeting")
    def resolve_greeting(*_, **kwargs):
        name = kwargs.pop("name")
        return {"name": name}

    @query.field("error")
    def resolve_error(obj, *_):
        raise RuntimeError("resolver failed")

    @greeting.field("name")
    def resolve_name(obj, *_):
        return "Hello, {}!".format(obj["name"])

    return make_executable_schema(type_defs, query)


def test_capture_request_and_response_if_send_pii_is_on_async(
    sentry_init, capture_events
):
    sentry_init(
        send_default_pii=True,
        integrations=[
            AriadneIntegration(),
            FastApiIntegration(),
            StarletteIntegration(),
        ],
    )
    events = capture_events()

    schema = schema_factory()

    async_app = FastAPI()
    async_app.mount("/graphql/", GraphQL(schema))

    query = {"query": "query ErrorQuery {error}"}
    client = TestClient(async_app)
    client.post("/graphql", json=query)

    assert len(events) == 1

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne"
    assert event["contexts"]["response"] == {
        "data": {
            "data": {"error": None},
            "errors": [
                {
                    "locations": [{"column": 19, "line": 1}],
                    "message": "resolver failed",
                    "path": ["error"],
                }
            ],
        }
    }
    assert event["request"]["api_target"] == "graphql"
    assert event["request"]["data"] == query


def test_capture_request_and_response_if_send_pii_is_on_sync(
    sentry_init, capture_events
):
    sentry_init(
        send_default_pii=True,
        integrations=[AriadneIntegration(), FlaskIntegration()],
    )
    events = capture_events()

    schema = schema_factory()

    sync_app = Flask(__name__)

    @sync_app.route("/graphql", methods=["POST"])
    def graphql_server():
        data = request.get_json()
        success, result = graphql_sync(schema, data)
        return jsonify(result), 200

    query = {"query": "query ErrorQuery {error}"}
    client = sync_app.test_client()
    client.post("/graphql", json=query)

    assert len(events) == 1

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne"
    assert event["contexts"]["response"] == {
        "data": {
            "data": {"error": None},
            "errors": [
                {
                    "locations": [{"column": 19, "line": 1}],
                    "message": "resolver failed",
                    "path": ["error"],
                }
            ],
        }
    }
    assert event["request"]["api_target"] == "graphql"
    assert event["request"]["data"] == query


def test_do_not_capture_request_and_response_if_send_pii_is_off_async(
    sentry_init, capture_events
):
    sentry_init(
        integrations=[
            AriadneIntegration(),
            FastApiIntegration(),
            StarletteIntegration(),
        ],
    )
    events = capture_events()

    schema = schema_factory()

    async_app = FastAPI()
    async_app.mount("/graphql/", GraphQL(schema))

    query = {"query": "query ErrorQuery {error}"}
    client = TestClient(async_app)
    client.post("/graphql", json=query)

    assert len(events) == 1

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne"
    assert "data" not in event["request"]
    assert "response" not in event["contexts"]


def test_do_not_capture_request_and_response_if_send_pii_is_off_sync(
    sentry_init, capture_events
):
    sentry_init(
        integrations=[AriadneIntegration(), FlaskIntegration()],
    )
    events = capture_events()

    schema = schema_factory()

    sync_app = Flask(__name__)

    @sync_app.route("/graphql", methods=["POST"])
    def graphql_server():
        data = request.get_json()
        success, result = graphql_sync(schema, data)
        return jsonify(result), 200

    query = {"query": "query ErrorQuery {error}"}
    client = sync_app.test_client()
    client.post("/graphql", json=query)

    assert len(events) == 1

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne"
    assert "data" not in event["request"]
    assert "response" not in event["contexts"]


def test_capture_validation_error(sentry_init, capture_events):
    sentry_init(
        send_default_pii=True,
        integrations=[
            AriadneIntegration(),
            FastApiIntegration(),
            StarletteIntegration(),
        ],
    )
    events = capture_events()

    schema = schema_factory()

    async_app = FastAPI()
    async_app.mount("/graphql/", GraphQL(schema))

    query = {"query": "query ErrorQuery {doesnt_exist}"}
    client = TestClient(async_app)
    client.post("/graphql", json=query)

    assert len(events) == 1

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne"
    assert event["contexts"]["response"] == {
        "data": {
            "errors": [
                {
                    "locations": [{"column": 19, "line": 1}],
                    "message": "Cannot query field 'doesnt_exist' on type 'Query'.",
                }
            ]
        }
    }
    assert event["request"]["api_target"] == "graphql"
    assert event["request"]["data"] == query


def test_no_event_if_no_errors_async(sentry_init, capture_events):
    sentry_init(
        integrations=[
            AriadneIntegration(),
            FastApiIntegration(),
            StarletteIntegration(),
        ],
    )
    events = capture_events()

    schema = schema_factory()

    async_app = FastAPI()
    async_app.mount("/graphql/", GraphQL(schema))

    query = {
        "query": "query GreetingQuery($name: String) { greeting(name: $name) {name} }",
        "variables": {"name": "some name"},
    }
    client = TestClient(async_app)
    client.post("/graphql", json=query)

    assert len(events) == 0


def test_no_event_if_no_errors_sync(sentry_init, capture_events):
    sentry_init(
        integrations=[AriadneIntegration(), FlaskIntegration()],
    )
    events = capture_events()

    schema = schema_factory()

    sync_app = Flask(__name__)

    @sync_app.route("/graphql", methods=["POST"])
    def graphql_server():
        data = request.get_json()
        success, result = graphql_sync(schema, data)
        return jsonify(result), 200

    query = {
        "query": "query GreetingQuery($name: String) { greeting(name: $name) {name} }",
        "variables": {"name": "some name"},
    }
    client = sync_app.test_client()
    client.post("/graphql", json=query)

    assert len(events) == 0
sentry-python-2.18.0/tests/integrations/arq/000077500000000000000000000000001471214654000211045ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/arq/__init__.py000066400000000000000000000000521471214654000232120ustar00rootroot00000000000000import pytest

pytest.importorskip("arq")
sentry-python-2.18.0/tests/integrations/arq/test_arq.py000066400000000000000000000204101471214654000232750ustar00rootroot00000000000000import asyncio
import pytest

from sentry_sdk import get_client, start_transaction
from sentry_sdk.integrations.arq import ArqIntegration

import arq.worker
from arq import cron
from arq.connections import ArqRedis
from arq.jobs import Job
from arq.utils import timestamp_ms

from fakeredis.aioredis import FakeRedis


def async_partial(async_fn, *args, **kwargs):
    # asyncio.iscoroutinefunction (used in the integration code) does not
    # detect async functions wrapped in functools.partial objects on
    # Python < 3.8, so this helper returns a real coroutine function instead.
    async def wrapped(ctx):
        return await async_fn(ctx, *args, **kwargs)

    return wrapped
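
# Illustrative usage (hypothetical call, not used by the tests directly):
#     fn = async_partial(some_async_fn, a=1)
#     asyncio.iscoroutinefunction(fn)  # True on all versions, unlike a
#                                      # functools.partial on Python < 3.8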


@pytest.fixture(autouse=True)
def patch_fakeredis_info_command():
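    # fakeredis may not implement the Redis INFO command, which arq issues
    # when logging server info at worker startup; register a minimal stub
    # that echoes back the requested section so the fake server can respond.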
    from fakeredis._fakesocket import FakeSocket

    if not hasattr(FakeSocket, "info"):
        from fakeredis._commands import command
        from fakeredis._helpers import SimpleString

        @command((SimpleString,), name="info")
        def info(self, section):
            return section

        FakeSocket.info = info


@pytest.fixture
def init_arq(sentry_init):
    def inner(
        cls_functions=None,
        cls_cron_jobs=None,
        kw_functions=None,
        kw_cron_jobs=None,
        allow_abort_jobs_=False,
    ):
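        # Worker configuration can be passed either as attributes on a
        # settings class (cls_*) or as keyword arguments to
        # arq.worker.create_worker (kw_*); the tests exercise both paths.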
        cls_functions = cls_functions or []
        cls_cron_jobs = cls_cron_jobs or []

        kwargs = {}
        if kw_functions is not None:
            kwargs["functions"] = kw_functions
        if kw_cron_jobs is not None:
            kwargs["cron_jobs"] = kw_cron_jobs

        sentry_init(
            integrations=[ArqIntegration()],
            traces_sample_rate=1.0,
            send_default_pii=True,
        )

        server = FakeRedis()
        pool = ArqRedis(pool_or_conn=server.connection_pool)

        class WorkerSettings:
            functions = cls_functions
            cron_jobs = cls_cron_jobs
            redis_pool = pool
            allow_abort_jobs = allow_abort_jobs_

        if not WorkerSettings.functions:
            del WorkerSettings.functions
        if not WorkerSettings.cron_jobs:
            del WorkerSettings.cron_jobs

        worker = arq.worker.create_worker(WorkerSettings, **kwargs)

        return pool, worker

    return inner


@pytest.mark.asyncio
async def test_job_result(init_arq):
    async def increase(ctx, num):
        return num + 1

    increase.__qualname__ = increase.__name__

    pool, worker = init_arq([increase])

    job = await pool.enqueue_job("increase", 3)

    assert isinstance(job, Job)

    await worker.run_job(job.job_id, timestamp_ms())
    result = await job.result()
    job_result = await job.result_info()

    assert result == 4
    assert job_result.result == 4


@pytest.mark.asyncio
async def test_job_retry(capture_events, init_arq):
    async def retry_job(ctx):
        if ctx["job_try"] < 2:
            raise arq.worker.Retry

    retry_job.__qualname__ = retry_job.__name__

    pool, worker = init_arq([retry_job])

    job = await pool.enqueue_job("retry_job")

    events = capture_events()

    await worker.run_job(job.job_id, timestamp_ms())

    event = events.pop(0)
    assert event["contexts"]["trace"]["status"] == "aborted"
    assert event["transaction"] == "retry_job"
    assert event["tags"]["arq_task_id"] == job.job_id
    assert event["extra"]["arq-job"]["retry"] == 1

    await worker.run_job(job.job_id, timestamp_ms())

    event = events.pop(0)
    assert event["contexts"]["trace"]["status"] == "ok"
    assert event["transaction"] == "retry_job"
    assert event["tags"]["arq_task_id"] == job.job_id
    assert event["extra"]["arq-job"]["retry"] == 2


@pytest.mark.parametrize(
    "source", [("cls_functions", "cls_cron_jobs"), ("kw_functions", "kw_cron_jobs")]
)
@pytest.mark.parametrize("job_fails", [True, False], ids=["error", "success"])
@pytest.mark.asyncio
async def test_job_transaction(capture_events, init_arq, source, job_fails):
    async def division(_, a, b=0):
        return a / b

    division.__qualname__ = division.__name__

    cron_func = async_partial(division, a=1, b=int(not job_fails))
    cron_func.__qualname__ = division.__name__

    cron_job = cron(cron_func, minute=0, run_at_startup=True)

    functions_key, cron_jobs_key = source
    pool, worker = init_arq(**{functions_key: [division], cron_jobs_key: [cron_job]})

    events = capture_events()

    job = await pool.enqueue_job("division", 1, b=int(not job_fails))
    await worker.run_job(job.job_id, timestamp_ms())

    loop = asyncio.get_event_loop()
    task = loop.create_task(worker.async_run())
    await asyncio.sleep(1)

    task.cancel()

    await worker.close()

    if job_fails:
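        # Events arrive as [func error, func transaction, cron error, cron
        # transaction]; popping index 0 and then index 1 removes both error
        # events, leaving only the two transactions for the checks below.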
        error_func_event = events.pop(0)
        error_cron_event = events.pop(1)

        assert error_func_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
        assert error_func_event["exception"]["values"][0]["mechanism"]["type"] == "arq"

        func_extra = error_func_event["extra"]["arq-job"]
        assert func_extra["task"] == "division"

        assert error_cron_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
        assert error_cron_event["exception"]["values"][0]["mechanism"]["type"] == "arq"

        cron_extra = error_cron_event["extra"]["arq-job"]
        assert cron_extra["task"] == "cron:division"

    [func_event, cron_event] = events

    assert func_event["type"] == "transaction"
    assert func_event["transaction"] == "division"
    assert func_event["transaction_info"] == {"source": "task"}

    assert "arq_task_id" in func_event["tags"]
    assert "arq_task_retry" in func_event["tags"]

    func_extra = func_event["extra"]["arq-job"]

    assert func_extra["task"] == "division"
    assert func_extra["kwargs"] == {"b": int(not job_fails)}
    assert func_extra["retry"] == 1

    assert cron_event["type"] == "transaction"
    assert cron_event["transaction"] == "cron:division"
    assert cron_event["transaction_info"] == {"source": "task"}

    assert "arq_task_id" in cron_event["tags"]
    assert "arq_task_retry" in cron_event["tags"]

    cron_extra = cron_event["extra"]["arq-job"]

    assert cron_extra["task"] == "cron:division"
    assert cron_extra["kwargs"] == {}
    assert cron_extra["retry"] == 1


@pytest.mark.parametrize("source", ["cls_functions", "kw_functions"])
@pytest.mark.asyncio
async def test_enqueue_job(capture_events, init_arq, source):
    async def dummy_job(_):
        pass

    pool, _ = init_arq(**{source: [dummy_job]})

    events = capture_events()

    with start_transaction() as transaction:
        await pool.enqueue_job("dummy_job")

    (event,) = events

    assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id
    assert event["contexts"]["trace"]["span_id"] == transaction.span_id

    assert len(event["spans"])
    assert event["spans"][0]["op"] == "queue.submit.arq"
    assert event["spans"][0]["description"] == "dummy_job"


@pytest.mark.asyncio
async def test_execute_job_without_integration(init_arq):
    async def dummy_job(_ctx):
        pass

    dummy_job.__qualname__ = dummy_job.__name__

    pool, worker = init_arq([dummy_job])
    # remove the integration to trigger the edge case
    get_client().integrations.pop("arq")

    job = await pool.enqueue_job("dummy_job")

    await worker.run_job(job.job_id, timestamp_ms())

    assert await job.result() is None


@pytest.mark.parametrize("source", ["cls_functions", "kw_functions"])
@pytest.mark.asyncio
async def test_span_origin_producer(capture_events, init_arq, source):
    async def dummy_job(_):
        pass

    pool, _ = init_arq(**{source: [dummy_job]})

    events = capture_events()

    with start_transaction():
        await pool.enqueue_job("dummy_job")

    (event,) = events
    assert event["contexts"]["trace"]["origin"] == "manual"
    assert event["spans"][0]["origin"] == "auto.queue.arq"


@pytest.mark.asyncio
async def test_span_origin_consumer(capture_events, init_arq):
    async def job(ctx):
        pass

    job.__qualname__ = job.__name__

    pool, worker = init_arq([job])

    job = await pool.enqueue_job("retry_job")

    events = capture_events()

    await worker.run_job(job.job_id, timestamp_ms())

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "auto.queue.arq"
    assert event["spans"][0]["origin"] == "auto.db.redis"
    assert event["spans"][1]["origin"] == "auto.db.redis"
sentry-python-2.18.0/tests/integrations/asgi/000077500000000000000000000000001471214654000212445ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/asgi/__init__.py000066400000000000000000000002011471214654000233460ustar00rootroot00000000000000import pytest

pytest.importorskip("asyncio")
pytest.importorskip("pytest_asyncio")
pytest.importorskip("async_asgi_testclient")
sentry-python-2.18.0/tests/integrations/asgi/test_asgi.py000066400000000000000000000466701471214654000236150ustar00rootroot00000000000000from collections import Counter

import pytest
import sentry_sdk
from sentry_sdk import capture_message
from sentry_sdk.integrations._asgi_common import _get_ip, _get_headers
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware, _looks_like_asgi3

from async_asgi_testclient import TestClient


@pytest.fixture
def asgi3_app():
    async def app(scope, receive, send):
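        # Handle the ASGI lifespan protocol so the test client's startup and
        # shutdown complete; http requests routed to "/trigger/error" raise
        # ZeroDivisionError further down.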
        if scope["type"] == "lifespan":
            while True:
                message = await receive()
                if message["type"] == "lifespan.startup":
                    await send({"type": "lifespan.startup.complete"})
                elif message["type"] == "lifespan.shutdown":
                    await send({"type": "lifespan.shutdown.complete"})
                    return
        elif (
            scope["type"] == "http"
            and "route" in scope
            and scope["route"] == "/trigger/error"
        ):
            1 / 0

        await send(
            {
                "type": "http.response.start",
                "status": 200,
                "headers": [
                    [b"content-type", b"text/plain"],
                ],
            }
        )

        await send(
            {
                "type": "http.response.body",
                "body": b"Hello, world!",
            }
        )

    return app


@pytest.fixture
def asgi3_app_with_error():
    async def send_with_error(event):
        1 / 0

    async def app(scope, receive, send):
        if scope["type"] == "lifespan":
            while True:
                message = await receive()
                if message["type"] == "lifespan.startup":
                    ...  # Do some startup here!
                    await send({"type": "lifespan.startup.complete"})
                elif message["type"] == "lifespan.shutdown":
                    ...  # Do some shutdown here!
                    await send({"type": "lifespan.shutdown.complete"})
                    return
        else:
            await send_with_error(
                {
                    "type": "http.response.start",
                    "status": 200,
                    "headers": [
                        [b"content-type", b"text/plain"],
                    ],
                }
            )
            await send_with_error(
                {
                    "type": "http.response.body",
                    "body": b"Hello, world!",
                }
            )

    return app


@pytest.fixture
def asgi3_app_with_error_and_msg():
    async def app(scope, receive, send):
        await send(
            {
                "type": "http.response.start",
                "status": 200,
                "headers": [
                    [b"content-type", b"text/plain"],
                ],
            }
        )

        capture_message("Let's try dividing by 0")
        1 / 0

        await send(
            {
                "type": "http.response.body",
                "body": b"Hello, world!",
            }
        )

    return app


@pytest.fixture
def asgi3_ws_app():
    def message():
        capture_message("Some message to the world!")
        raise ValueError("Oh no")

    async def app(scope, receive, send):
        await send(
            {
                "type": "websocket.send",
                "text": message(),
            }
        )

    return app


@pytest.fixture
def asgi3_custom_transaction_app():

    async def app(scope, receive, send):
        sentry_sdk.get_current_scope().set_transaction_name("foobar", source="custom")
        await send(
            {
                "type": "http.response.start",
                "status": 200,
                "headers": [
                    [b"content-type", b"text/plain"],
                ],
            }
        )

        await send(
            {
                "type": "http.response.body",
                "body": b"Hello, world!",
            }
        )

    return app


def test_invalid_transaction_style(asgi3_app):
    with pytest.raises(ValueError) as exp:
        SentryAsgiMiddleware(asgi3_app, transaction_style="URL")

    assert (
        str(exp.value)
        == "Invalid value for transaction_style: URL (must be in ('endpoint', 'url'))"
    )


@pytest.mark.asyncio
async def test_capture_transaction(
    sentry_init,
    asgi3_app,
    capture_events,
):
    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
    app = SentryAsgiMiddleware(asgi3_app)

    async with TestClient(app) as client:
        events = capture_events()
        await client.get("/some_url?somevalue=123")

    (transaction_event,) = events

    assert transaction_event["type"] == "transaction"
    assert transaction_event["transaction"] == "/some_url"
    assert transaction_event["transaction_info"] == {"source": "url"}
    assert transaction_event["contexts"]["trace"]["op"] == "http.server"
    assert transaction_event["request"] == {
        "headers": {
            "host": "localhost",
            "remote-addr": "127.0.0.1",
            "user-agent": "ASGI-Test-Client",
        },
        "method": "GET",
        "query_string": "somevalue=123",
        "url": "http://localhost/some_url",
    }


@pytest.mark.asyncio
async def test_capture_transaction_with_error(
    sentry_init,
    asgi3_app_with_error,
    capture_events,
    DictionaryContaining,  # noqa: N803
):
    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
    app = SentryAsgiMiddleware(asgi3_app_with_error)

    events = capture_events()
    with pytest.raises(ZeroDivisionError):
        async with TestClient(app) as client:
            await client.get("/some_url")

    (
        error_event,
        transaction_event,
    ) = events

    assert error_event["transaction"] == "/some_url"
    assert error_event["transaction_info"] == {"source": "url"}
    assert error_event["contexts"]["trace"]["op"] == "http.server"
    assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
    assert error_event["exception"]["values"][0]["value"] == "division by zero"
    assert error_event["exception"]["values"][0]["mechanism"]["handled"] is False
    assert error_event["exception"]["values"][0]["mechanism"]["type"] == "asgi"

    assert transaction_event["type"] == "transaction"
    assert transaction_event["contexts"]["trace"] == DictionaryContaining(
        error_event["contexts"]["trace"]
    )
    assert transaction_event["contexts"]["trace"]["status"] == "internal_error"
    assert transaction_event["transaction"] == error_event["transaction"]
    assert transaction_event["request"] == error_event["request"]


@pytest.mark.asyncio
async def test_has_trace_if_performance_enabled(
    sentry_init,
    asgi3_app_with_error_and_msg,
    capture_events,
):
    sentry_init(traces_sample_rate=1.0)
    app = SentryAsgiMiddleware(asgi3_app_with_error_and_msg)

    with pytest.raises(ZeroDivisionError):
        async with TestClient(app) as client:
            events = capture_events()
            await client.get("/")

    msg_event, error_event, transaction_event = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert transaction_event["contexts"]["trace"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert (
        error_event["contexts"]["trace"]["trace_id"]
        == transaction_event["contexts"]["trace"]["trace_id"]
        == msg_event["contexts"]["trace"]["trace_id"]
    )


@pytest.mark.asyncio
async def test_has_trace_if_performance_disabled(
    sentry_init,
    asgi3_app_with_error_and_msg,
    capture_events,
):
    sentry_init()
    app = SentryAsgiMiddleware(asgi3_app_with_error_and_msg)

    with pytest.raises(ZeroDivisionError):
        async with TestClient(app) as client:
            events = capture_events()
            await client.get("/")

    msg_event, error_event = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]


@pytest.mark.asyncio
async def test_trace_from_headers_if_performance_enabled(
    sentry_init,
    asgi3_app_with_error_and_msg,
    capture_events,
):
    sentry_init(traces_sample_rate=1.0)
    app = SentryAsgiMiddleware(asgi3_app_with_error_and_msg)

    trace_id = "582b43a4192642f0b136d5159a501701"
    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)

    with pytest.raises(ZeroDivisionError):
        async with TestClient(app) as client:
            events = capture_events()
            await client.get("/", headers={"sentry-trace": sentry_trace_header})

    msg_event, error_event, transaction_event = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert transaction_event["contexts"]["trace"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
    assert transaction_event["contexts"]["trace"]["trace_id"] == trace_id


@pytest.mark.asyncio
async def test_trace_from_headers_if_performance_disabled(
    sentry_init,
    asgi3_app_with_error_and_msg,
    capture_events,
):
    sentry_init()
    app = SentryAsgiMiddleware(asgi3_app_with_error_and_msg)

    trace_id = "582b43a4192642f0b136d5159a501701"
    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)

    with pytest.raises(ZeroDivisionError):
        async with TestClient(app) as client:
            events = capture_events()
            await client.get("/", headers={"sentry-trace": sentry_trace_header})

    msg_event, error_event = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]
    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]
    assert error_event["contexts"]["trace"]["trace_id"] == trace_id


@pytest.mark.asyncio
async def test_websocket(sentry_init, asgi3_ws_app, capture_events, request):
    sentry_init(send_default_pii=True)

    events = capture_events()

    asgi3_ws_app = SentryAsgiMiddleware(asgi3_ws_app)

    scope = {
        "type": "websocket",
        "endpoint": asgi3_app,
        "client": ("127.0.0.1", 60457),
        "route": "some_url",
        "headers": [
            ("accept", "*/*"),
        ],
    }

    with pytest.raises(ValueError):
        async with TestClient(asgi3_ws_app, scope=scope) as client:
            async with client.websocket_connect("/ws") as ws:
                await ws.receive_text()

    msg_event, error_event = events

    assert msg_event["message"] == "Some message to the world!"

    (exc,) = error_event["exception"]["values"]
    assert exc["type"] == "ValueError"
    assert exc["value"] == "Oh no"


@pytest.mark.asyncio
async def test_auto_session_tracking_with_aggregates(
    sentry_init, asgi3_app, capture_envelopes
):
    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
    app = SentryAsgiMiddleware(asgi3_app)

    scope = {
        "endpoint": asgi3_app,
        "client": ("127.0.0.1", 60457),
    }
    with pytest.raises(ZeroDivisionError):
        envelopes = capture_envelopes()
        async with TestClient(app, scope=scope) as client:
            scope["route"] = "/some/fine/url"
            await client.get("/some/fine/url")
            scope["route"] = "/some/fine/url"
            await client.get("/some/fine/url")
            scope["route"] = "/trigger/error"
            await client.get("/trigger/error")

    sentry_sdk.flush()

    count_item_types = Counter()
    for envelope in envelopes:
        count_item_types[envelope.items[0].type] += 1

    assert count_item_types["transaction"] == 3
    assert count_item_types["event"] == 1
    assert count_item_types["sessions"] == 1
    assert len(envelopes) == 5

    session_aggregates = envelopes[-1].items[0].payload.json["aggregates"]
    assert session_aggregates[0]["exited"] == 2
    assert session_aggregates[0]["crashed"] == 1
    assert len(session_aggregates) == 1


@pytest.mark.parametrize(
    "url,transaction_style,expected_transaction,expected_source",
    [
        (
            "/message",
            "url",
            "generic ASGI request",
            "route",
        ),
        (
            "/message",
            "endpoint",
            "tests.integrations.asgi.test_asgi.asgi3_app..app",
            "component",
        ),
    ],
)
@pytest.mark.asyncio
async def test_transaction_style(
    sentry_init,
    asgi3_app,
    capture_events,
    url,
    transaction_style,
    expected_transaction,
    expected_source,
):
    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
    app = SentryAsgiMiddleware(asgi3_app, transaction_style=transaction_style)

    scope = {
        "endpoint": asgi3_app,
        "route": url,
        "client": ("127.0.0.1", 60457),
    }

    async with TestClient(app, scope=scope) as client:
        events = capture_events()
        await client.get(url)

    (transaction_event,) = events

    assert transaction_event["transaction"] == expected_transaction
    assert transaction_event["transaction_info"] == {"source": expected_source}


def mock_asgi2_app():
    pass


class MockAsgi2App:
    def __call__():
        pass


class MockAsgi3App(MockAsgi2App):
    def __await__():
        pass

    async def __call__():
        pass


def test_looks_like_asgi3(asgi3_app):
    # branch: inspect.isclass(app)
    assert _looks_like_asgi3(MockAsgi3App)
    assert not _looks_like_asgi3(MockAsgi2App)

    # branch: inspect.isfunction(app)
    assert _looks_like_asgi3(asgi3_app)
    assert not _looks_like_asgi3(mock_asgi2_app)

    # branch: else
    asgi3 = MockAsgi3App()
    assert _looks_like_asgi3(asgi3)
    asgi2 = MockAsgi2App()
    assert not _looks_like_asgi3(asgi2)


def test_get_ip_x_forwarded_for():
    headers = [
        (b"x-forwarded-for", b"8.8.8.8"),
    ]
    scope = {
        "client": ("127.0.0.1", 60457),
        "headers": headers,
    }
    ip = _get_ip(scope)
    assert ip == "8.8.8.8"

    # x-forwarded-for overrides x-real-ip
    headers = [
        (b"x-forwarded-for", b"8.8.8.8"),
        (b"x-real-ip", b"10.10.10.10"),
    ]
    scope = {
        "client": ("127.0.0.1", 60457),
        "headers": headers,
    }
    ip = _get_ip(scope)
    assert ip == "8.8.8.8"

    # when multiple x-forwarded-for headers are present, the first one is taken
    headers = [
        (b"x-forwarded-for", b"5.5.5.5"),
        (b"x-forwarded-for", b"6.6.6.6"),
        (b"x-forwarded-for", b"7.7.7.7"),
    ]
    scope = {
        "client": ("127.0.0.1", 60457),
        "headers": headers,
    }
    ip = _get_ip(scope)
    assert ip == "5.5.5.5"


def test_get_ip_x_real_ip():
    headers = [
        (b"x-real-ip", b"10.10.10.10"),
    ]
    scope = {
        "client": ("127.0.0.1", 60457),
        "headers": headers,
    }
    ip = _get_ip(scope)
    assert ip == "10.10.10.10"

    # x-forwarded-for overrides x-real-ip
    headers = [
        (b"x-forwarded-for", b"8.8.8.8"),
        (b"x-real-ip", b"10.10.10.10"),
    ]
    scope = {
        "client": ("127.0.0.1", 60457),
        "headers": headers,
    }
    ip = _get_ip(scope)
    assert ip == "8.8.8.8"


def test_get_ip():
    # if no headers are provided, the ip is taken from the client.
    headers = []
    scope = {
        "client": ("127.0.0.1", 60457),
        "headers": headers,
    }
    ip = _get_ip(scope)
    assert ip == "127.0.0.1"

    # the x-forwarded-for header overrides the ip from the client
    headers = [
        (b"x-forwarded-for", b"8.8.8.8"),
    ]
    scope = {
        "client": ("127.0.0.1", 60457),
        "headers": headers,
    }
    ip = _get_ip(scope)
    assert ip == "8.8.8.8"

    # the x-real-ip header overrides the ip from the client
    headers = [
        (b"x-real-ip", b"10.10.10.10"),
    ]
    scope = {
        "client": ("127.0.0.1", 60457),
        "headers": headers,
    }
    ip = _get_ip(scope)
    assert ip == "10.10.10.10"


def test_get_headers():
    headers = [
        (b"x-real-ip", b"10.10.10.10"),
        (b"some_header", b"123"),
        (b"some_header", b"abc"),
    ]
    scope = {
        "client": ("127.0.0.1", 60457),
        "headers": headers,
    }
    headers = _get_headers(scope)
    assert headers == {
        "x-real-ip": "10.10.10.10",
        "some_header": "123, abc",
    }


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "request_url,transaction_style,expected_transaction_name,expected_transaction_source",
    [
        (
            "/message/123456",
            "endpoint",
            "/message/123456",
            "url",
        ),
        (
            "/message/123456",
            "url",
            "/message/123456",
            "url",
        ),
    ],
)
async def test_transaction_name(
    sentry_init,
    request_url,
    transaction_style,
    expected_transaction_name,
    expected_transaction_source,
    asgi3_app,
    capture_envelopes,
):
    """
    Tests that the transaction name is meaningful, i.e. the request URL.
    """
    sentry_init(
        traces_sample_rate=1.0,
    )

    envelopes = capture_envelopes()

    app = SentryAsgiMiddleware(asgi3_app, transaction_style=transaction_style)

    async with TestClient(app) as client:
        await client.get(request_url)

    (transaction_envelope,) = envelopes
    transaction_event = transaction_envelope.get_transaction_event()

    assert transaction_event["transaction"] == expected_transaction_name
    assert (
        transaction_event["transaction_info"]["source"] == expected_transaction_source
    )


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "request_url, transaction_style,expected_transaction_name,expected_transaction_source",
    [
        (
            "/message/123456",
            "endpoint",
            "/message/123456",
            "url",
        ),
        (
            "/message/123456",
            "url",
            "/message/123456",
            "url",
        ),
    ],
)
async def test_transaction_name_in_traces_sampler(
    sentry_init,
    request_url,
    transaction_style,
    expected_transaction_name,
    expected_transaction_source,
    asgi3_app,
):
    """
    Tests that a custom traces_sampler sees a meaningful transaction name.
    In this case the URL (for both transaction styles), because the route is
    not yet known when the sampler runs.
    """

    def dummy_traces_sampler(sampling_context):
        assert (
            sampling_context["transaction_context"]["name"] == expected_transaction_name
        )
        assert (
            sampling_context["transaction_context"]["source"]
            == expected_transaction_source
        )

    sentry_init(
        traces_sampler=dummy_traces_sampler,
        traces_sample_rate=1.0,
    )

    app = SentryAsgiMiddleware(asgi3_app, transaction_style=transaction_style)

    async with TestClient(app) as client:
        await client.get(request_url)


@pytest.mark.asyncio
async def test_custom_transaction_name(
    sentry_init, asgi3_custom_transaction_app, capture_events
):
    sentry_init(traces_sample_rate=1.0)
    events = capture_events()
    app = SentryAsgiMiddleware(asgi3_custom_transaction_app)

    async with TestClient(app) as client:
        await client.get("/test")

    (transaction_event,) = events
    assert transaction_event["type"] == "transaction"
    assert transaction_event["transaction"] == "foobar"
    assert transaction_event["transaction_info"] == {"source": "custom"}
sentry-python-2.18.0/tests/integrations/asyncio/000077500000000000000000000000001471214654000217665ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/asyncio/__init__.py000066400000000000000000000000001471214654000240650ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/asyncio/test_asyncio.py000066400000000000000000000256171471214654000250570ustar00rootroot00000000000000import asyncio
import inspect
import sys
from unittest.mock import MagicMock, patch

import pytest

import sentry_sdk
from sentry_sdk.consts import OP
from sentry_sdk.integrations.asyncio import AsyncioIntegration, patch_asyncio

try:
    from contextvars import Context, ContextVar
except ImportError:
    pass  # All tests will be skipped with incompatible versions


minimum_python_37 = pytest.mark.skipif(
    sys.version_info < (3, 7), reason="Asyncio tests need Python >= 3.7"
)


minimum_python_311 = pytest.mark.skipif(
    sys.version_info < (3, 11),
    reason="Asyncio task context parameter was introduced in Python 3.11",
)


async def foo():
    await asyncio.sleep(0.01)


async def bar():
    await asyncio.sleep(0.01)


async def boom():
    1 / 0


@pytest.fixture(scope="session")
def event_loop(request):
    """Create an instance of the default event loop for each test case."""
    loop = asyncio.get_event_loop_policy().new_event_loop()
    yield loop
    loop.close()


def get_sentry_task_factory(mock_get_running_loop):
    """
    Patches asyncio (with a mocked running loop) and returns the Sentry task
    factory that was installed on the loop.
    """
    mock_loop = mock_get_running_loop.return_value
    patch_asyncio()
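    # patch_asyncio() installs its task factory via loop.set_task_factory();
    # recover that factory from the mocked loop's recorded call arguments.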
    patched_factory = mock_loop.set_task_factory.call_args[0][0]

    return patched_factory


@minimum_python_37
@pytest.mark.asyncio
async def test_create_task(
    sentry_init,
    capture_events,
    event_loop,
):
    sentry_init(
        traces_sample_rate=1.0,
        send_default_pii=True,
        integrations=[
            AsyncioIntegration(),
        ],
    )

    events = capture_events()

    with sentry_sdk.start_transaction(name="test_transaction_for_create_task"):
        with sentry_sdk.start_span(op="root", name="not so important"):
            tasks = [event_loop.create_task(foo()), event_loop.create_task(bar())]
            await asyncio.wait(tasks, return_when=asyncio.FIRST_EXCEPTION)

            sentry_sdk.flush()

    (transaction_event,) = events

    assert transaction_event["spans"][0]["op"] == "root"
    assert transaction_event["spans"][0]["description"] == "not so important"

    assert transaction_event["spans"][1]["op"] == OP.FUNCTION
    assert transaction_event["spans"][1]["description"] == "foo"
    assert (
        transaction_event["spans"][1]["parent_span_id"]
        == transaction_event["spans"][0]["span_id"]
    )

    assert transaction_event["spans"][2]["op"] == OP.FUNCTION
    assert transaction_event["spans"][2]["description"] == "bar"
    assert (
        transaction_event["spans"][2]["parent_span_id"]
        == transaction_event["spans"][0]["span_id"]
    )


@minimum_python_37
@pytest.mark.asyncio
async def test_gather(
    sentry_init,
    capture_events,
):
    sentry_init(
        traces_sample_rate=1.0,
        send_default_pii=True,
        integrations=[
            AsyncioIntegration(),
        ],
    )

    events = capture_events()

    with sentry_sdk.start_transaction(name="test_transaction_for_gather"):
        with sentry_sdk.start_span(op="root", name="not so important"):
            await asyncio.gather(foo(), bar(), return_exceptions=True)

        sentry_sdk.flush()

    (transaction_event,) = events

    assert transaction_event["spans"][0]["op"] == "root"
    assert transaction_event["spans"][0]["description"] == "not so important"

    assert transaction_event["spans"][1]["op"] == OP.FUNCTION
    assert transaction_event["spans"][1]["description"] == "foo"
    assert (
        transaction_event["spans"][1]["parent_span_id"]
        == transaction_event["spans"][0]["span_id"]
    )

    assert transaction_event["spans"][2]["op"] == OP.FUNCTION
    assert transaction_event["spans"][2]["description"] == "bar"
    assert (
        transaction_event["spans"][2]["parent_span_id"]
        == transaction_event["spans"][0]["span_id"]
    )


@minimum_python_37
@pytest.mark.asyncio
async def test_exception(
    sentry_init,
    capture_events,
    event_loop,
):
    sentry_init(
        traces_sample_rate=1.0,
        send_default_pii=True,
        integrations=[
            AsyncioIntegration(),
        ],
    )

    events = capture_events()

    with sentry_sdk.start_transaction(name="test_exception"):
        with sentry_sdk.start_span(op="root", name="not so important"):
            tasks = [event_loop.create_task(boom()), event_loop.create_task(bar())]
            await asyncio.wait(tasks, return_when=asyncio.FIRST_EXCEPTION)

            sentry_sdk.flush()

    (error_event, _) = events

    assert error_event["transaction"] == "test_exception"
    assert error_event["contexts"]["trace"]["op"] == "function"
    assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
    assert error_event["exception"]["values"][0]["value"] == "division by zero"
    assert error_event["exception"]["values"][0]["mechanism"]["handled"] is False
    assert error_event["exception"]["values"][0]["mechanism"]["type"] == "asyncio"


@minimum_python_37
@pytest.mark.asyncio
async def test_task_result(sentry_init):
    sentry_init(
        integrations=[
            AsyncioIntegration(),
        ],
    )

    async def add(a, b):
        return a + b

    result = await asyncio.create_task(add(1, 2))
    assert result == 3, result


@minimum_python_311
@pytest.mark.asyncio
async def test_task_with_context(sentry_init):
    """
    Integration test to ensure working context parameter in Python 3.11+
    """
    sentry_init(
        integrations=[
            AsyncioIntegration(),
        ],
    )

    var = ContextVar("var")
    var.set("original value")

    async def change_value():
        var.set("changed value")

    async def retrieve_value():
        return var.get()

    # Create a context and run both tasks within the context
    ctx = Context()
    async with asyncio.TaskGroup() as tg:
        tg.create_task(change_value(), context=ctx)
        retrieve_task = tg.create_task(retrieve_value(), context=ctx)

    assert retrieve_task.result() == "changed value"


@minimum_python_37
@patch("asyncio.get_running_loop")
def test_patch_asyncio(mock_get_running_loop):
    """
    Test that the patch_asyncio function will patch the task factory.
    """
    mock_loop = mock_get_running_loop.return_value

    patch_asyncio()

    assert mock_loop.set_task_factory.called

    set_task_factory_args, _ = mock_loop.set_task_factory.call_args
    assert len(set_task_factory_args) == 1

    sentry_task_factory, *_ = set_task_factory_args
    assert callable(sentry_task_factory)


@minimum_python_37
@patch("asyncio.get_running_loop")
@patch("sentry_sdk.integrations.asyncio.Task")
def test_sentry_task_factory_no_factory(MockTask, mock_get_running_loop):  # noqa: N803
    mock_loop = mock_get_running_loop.return_value
    mock_coro = MagicMock()

    # Set the original task factory to None
    mock_loop.get_task_factory.return_value = None

    # Retrieve the sentry task factory (since it is an inner function within patch_asyncio)
    sentry_task_factory = get_sentry_task_factory(mock_get_running_loop)

    # The call we are testing
    ret_val = sentry_task_factory(mock_loop, mock_coro)

    assert MockTask.called
    assert ret_val == MockTask.return_value

    task_args, task_kwargs = MockTask.call_args
    assert len(task_args) == 1

    coro_param, *_ = task_args
    assert inspect.iscoroutine(coro_param)

    assert "loop" in task_kwargs
    assert task_kwargs["loop"] == mock_loop


@minimum_python_37
@patch("asyncio.get_running_loop")
def test_sentry_task_factory_with_factory(mock_get_running_loop):
    mock_loop = mock_get_running_loop.return_value
    mock_coro = MagicMock()

    # The original task factory will be mocked out here, let's retrieve the value for later
    orig_task_factory = mock_loop.get_task_factory.return_value

    # Retrieve the sentry task factory (since it is an inner function within patch_asyncio)
    sentry_task_factory = get_sentry_task_factory(mock_get_running_loop)

    # The call we are testing
    ret_val = sentry_task_factory(mock_loop, mock_coro)

    assert orig_task_factory.called
    assert ret_val == orig_task_factory.return_value

    task_factory_args, _ = orig_task_factory.call_args
    assert len(task_factory_args) == 2

    loop_arg, coro_arg = task_factory_args
    assert loop_arg == mock_loop
    assert inspect.iscoroutine(coro_arg)


@minimum_python_311
@patch("asyncio.get_running_loop")
@patch("sentry_sdk.integrations.asyncio.Task")
def test_sentry_task_factory_context_no_factory(
    MockTask, mock_get_running_loop  # noqa: N803
):
    mock_loop = mock_get_running_loop.return_value
    mock_coro = MagicMock()
    mock_context = MagicMock()

    # Set the original task factory to None
    mock_loop.get_task_factory.return_value = None

    # Retrieve the sentry task factory (since it is an inner function within patch_asyncio)
    sentry_task_factory = get_sentry_task_factory(mock_get_running_loop)

    # The call we are testing
    ret_val = sentry_task_factory(mock_loop, mock_coro, context=mock_context)

    assert MockTask.called
    assert ret_val == MockTask.return_value

    task_args, task_kwargs = MockTask.call_args
    assert len(task_args) == 1

    coro_param, *_ = task_args
    assert inspect.iscoroutine(coro_param)

    assert "loop" in task_kwargs
    assert task_kwargs["loop"] == mock_loop
    assert "context" in task_kwargs
    assert task_kwargs["context"] == mock_context


@minimum_python_311
@patch("asyncio.get_running_loop")
def test_sentry_task_factory_context_with_factory(mock_get_running_loop):
    mock_loop = mock_get_running_loop.return_value
    mock_coro = MagicMock()
    mock_context = MagicMock()

    # The original task factory will be mocked out here, let's retrieve the value for later
    orig_task_factory = mock_loop.get_task_factory.return_value

    # Retrieve the sentry task factory (since it is an inner function within patch_asyncio)
    sentry_task_factory = get_sentry_task_factory(mock_get_running_loop)

    # The call we are testing
    ret_val = sentry_task_factory(mock_loop, mock_coro, context=mock_context)

    assert orig_task_factory.called
    assert ret_val == orig_task_factory.return_value

    task_factory_args, task_factory_kwargs = orig_task_factory.call_args
    assert len(task_factory_args) == 2

    loop_arg, coro_arg = task_factory_args
    assert loop_arg == mock_loop
    assert inspect.iscoroutine(coro_arg)

    assert "context" in task_factory_kwargs
    assert task_factory_kwargs["context"] == mock_context


@minimum_python_37
@pytest.mark.asyncio
async def test_span_origin(
    sentry_init,
    capture_events,
    event_loop,
):
    sentry_init(
        integrations=[AsyncioIntegration()],
        traces_sample_rate=1.0,
    )

    events = capture_events()

    with sentry_sdk.start_transaction(name="something"):
        tasks = [
            event_loop.create_task(foo()),
        ]
        await asyncio.wait(tasks, return_when=asyncio.FIRST_EXCEPTION)

        sentry_sdk.flush()

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"
    assert event["spans"][0]["origin"] == "auto.function.asyncio"
sentry-python-2.18.0/tests/integrations/asyncpg/000077500000000000000000000000001471214654000217655ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/asyncpg/__init__.py000066400000000000000000000005041471214654000240750ustar00rootroot00000000000000import os
import sys
import pytest

pytest.importorskip("asyncpg")
pytest.importorskip("pytest_asyncio")

# Add this directory to the module search path so that `asyncpg_helpers` is
# importable and query source path names can be tested relative to the module.
# See `test_query_source_with_module_in_search_path`.
sys.path.insert(0, os.path.join(os.path.dirname(__file__)))
sentry-python-2.18.0/tests/integrations/asyncpg/asyncpg_helpers/000077500000000000000000000000001471214654000251535ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/asyncpg/asyncpg_helpers/__init__.py000066400000000000000000000000001471214654000272520ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/asyncpg/asyncpg_helpers/helpers.py000066400000000000000000000001361471214654000271670ustar00rootroot00000000000000async def execute_query_in_connection(query, connection):
    await connection.execute(query)
sentry-python-2.18.0/tests/integrations/asyncpg/test_asyncpg.py000066400000000000000000000527211471214654000250510ustar00rootroot00000000000000"""
These tests need pytest-asyncio installed.

They also need a local PostgreSQL instance running. The credentials passed to
docker must match the SENTRY_PYTHON_TEST_POSTGRES_* values below, e.g. with the
defaults:
```sh
docker run --rm --name some-postgres -e POSTGRES_USER=postgres -e POSTGRES_PASSWORD=sentry -d -p 5432:5432 postgres
```

The tests use the following credentials (overridable via the environment
variables below) to establish a database connection.
"""

import os


PG_HOST = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_HOST", "localhost")
PG_PORT = int(os.getenv("SENTRY_PYTHON_TEST_POSTGRES_PORT", "5432"))
PG_USER = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_USER", "postgres")
PG_PASSWORD = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_PASSWORD", "sentry")
PG_NAME = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_NAME", "postgres")

import datetime
from contextlib import contextmanager
from unittest import mock

import asyncpg
import pytest
import pytest_asyncio
from asyncpg import connect, Connection

from sentry_sdk import capture_message, start_transaction
from sentry_sdk.integrations.asyncpg import AsyncPGIntegration
from sentry_sdk.consts import SPANDATA
from sentry_sdk.tracing_utils import record_sql_queries
from tests.conftest import ApproxDict


PG_CONNECTION_URI = "postgresql://{}:{}@{}/{}".format(
    PG_USER, PG_PASSWORD, PG_HOST, PG_NAME
)
CRUMBS_CONNECT = {
    "category": "query",
    "data": ApproxDict(
        {
            "db.name": PG_NAME,
            "db.system": "postgresql",
            "db.user": PG_USER,
            "server.address": PG_HOST,
            "server.port": PG_PORT,
        }
    ),
    "message": "connect",
    "type": "default",
}
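
# Note on the assertion helper: ApproxDict (imported from tests.conftest) is
# presumably a dict matcher that only requires the listed key/value pairs to be
# present, so extra connection metadata added by the SDK does not fail the
# breadcrumb comparisons below.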


@pytest_asyncio.fixture(autouse=True)
async def _clean_pg():
    conn = await connect(PG_CONNECTION_URI)
    await conn.execute("DROP TABLE IF EXISTS users")
    await conn.execute(
        """
            CREATE TABLE users(
                id serial PRIMARY KEY,
                name text,
                password text,
                dob date
            )
        """
    )
    await conn.close()


@pytest.mark.asyncio
async def test_connect(sentry_init, capture_events) -> None:
    sentry_init(
        integrations=[AsyncPGIntegration()],
        _experiments={"record_sql_params": True},
    )
    events = capture_events()

    conn: Connection = await connect(PG_CONNECTION_URI)

    await conn.close()

    capture_message("hi")

    (event,) = events

    for crumb in event["breadcrumbs"]["values"]:
        del crumb["timestamp"]

    assert event["breadcrumbs"]["values"] == [CRUMBS_CONNECT]


@pytest.mark.asyncio
async def test_execute(sentry_init, capture_events) -> None:
    sentry_init(
        integrations=[AsyncPGIntegration()],
        _experiments={"record_sql_params": True},
    )
    events = capture_events()

    conn: Connection = await connect(PG_CONNECTION_URI)

    await conn.execute(
        "INSERT INTO users(name, password, dob) VALUES ('Alice', 'pw', '1990-12-25')",
    )

    await conn.execute(
        "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
        "Bob",
        "secret_pw",
        datetime.date(1984, 3, 1),
    )

    row = await conn.fetchrow("SELECT * FROM users WHERE name = $1", "Bob")
    assert row == (2, "Bob", "secret_pw", datetime.date(1984, 3, 1))

    row = await conn.fetchrow("SELECT * FROM users WHERE name = 'Bob'")
    assert row == (2, "Bob", "secret_pw", datetime.date(1984, 3, 1))

    await conn.close()

    capture_message("hi")

    (event,) = events

    for crumb in event["breadcrumbs"]["values"]:
        del crumb["timestamp"]

    assert event["breadcrumbs"]["values"] == [
        CRUMBS_CONNECT,
        {
            "category": "query",
            "data": {},
            "message": "INSERT INTO users(name, password, dob) VALUES ('Alice', 'pw', '1990-12-25')",
            "type": "default",
        },
        {
            "category": "query",
            "data": {},
            "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
            "type": "default",
        },
        {
            "category": "query",
            "data": {},
            "message": "SELECT * FROM users WHERE name = $1",
            "type": "default",
        },
        {
            "category": "query",
            "data": {},
            "message": "SELECT * FROM users WHERE name = 'Bob'",
            "type": "default",
        },
    ]


@pytest.mark.asyncio
async def test_execute_many(sentry_init, capture_events) -> None:
    sentry_init(
        integrations=[AsyncPGIntegration()],
        _experiments={"record_sql_params": True},
    )
    events = capture_events()

    conn: Connection = await connect(PG_CONNECTION_URI)

    await conn.executemany(
        "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
        [
            ("Bob", "secret_pw", datetime.date(1984, 3, 1)),
            ("Alice", "pw", datetime.date(1990, 12, 25)),
        ],
    )

    await conn.close()

    capture_message("hi")

    (event,) = events

    for crumb in event["breadcrumbs"]["values"]:
        del crumb["timestamp"]

    assert event["breadcrumbs"]["values"] == [
        CRUMBS_CONNECT,
        {
            "category": "query",
            "data": {"db.executemany": True},
            "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
            "type": "default",
        },
    ]


@pytest.mark.asyncio
async def test_record_params(sentry_init, capture_events) -> None:
    sentry_init(
        integrations=[AsyncPGIntegration(record_params=True)],
        _experiments={"record_sql_params": True},
    )
    events = capture_events()

    conn: Connection = await connect(PG_CONNECTION_URI)

    await conn.execute(
        "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
        "Bob",
        "secret_pw",
        datetime.date(1984, 3, 1),
    )

    await conn.close()

    capture_message("hi")

    (event,) = events

    for crumb in event["breadcrumbs"]["values"]:
        del crumb["timestamp"]

    assert event["breadcrumbs"]["values"] == [
        CRUMBS_CONNECT,
        {
            "category": "query",
            "data": {
                "db.params": ["Bob", "secret_pw", "datetime.date(1984, 3, 1)"],
                "db.paramstyle": "format",
            },
            "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
            "type": "default",
        },
    ]


@pytest.mark.asyncio
async def test_cursor(sentry_init, capture_events) -> None:
    sentry_init(
        integrations=[AsyncPGIntegration()],
        _experiments={"record_sql_params": True},
    )
    events = capture_events()

    conn: Connection = await connect(PG_CONNECTION_URI)

    await conn.executemany(
        "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
        [
            ("Bob", "secret_pw", datetime.date(1984, 3, 1)),
            ("Alice", "pw", datetime.date(1990, 12, 25)),
        ],
    )

    async with conn.transaction():
        # Postgres requires non-scrollable cursors to be created
        # and used in a transaction.
        async for record in conn.cursor(
            "SELECT * FROM users WHERE dob > $1", datetime.date(1970, 1, 1)
        ):
            print(record)

    await conn.close()

    capture_message("hi")

    (event,) = events

    for crumb in event["breadcrumbs"]["values"]:
        del crumb["timestamp"]

    assert event["breadcrumbs"]["values"] == [
        CRUMBS_CONNECT,
        {
            "category": "query",
            "data": {"db.executemany": True},
            "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
            "type": "default",
        },
        {"category": "query", "data": {}, "message": "BEGIN;", "type": "default"},
        {
            "category": "query",
            "data": {},
            "message": "SELECT * FROM users WHERE dob > $1",
            "type": "default",
        },
        {"category": "query", "data": {}, "message": "COMMIT;", "type": "default"},
    ]


@pytest.mark.asyncio
async def test_cursor_manual(sentry_init, capture_events) -> None:
    sentry_init(
        integrations=[AsyncPGIntegration()],
        _experiments={"record_sql_params": True},
    )
    events = capture_events()

    conn: Connection = await connect(PG_CONNECTION_URI)

    await conn.executemany(
        "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
        [
            ("Bob", "secret_pw", datetime.date(1984, 3, 1)),
            ("Alice", "pw", datetime.date(1990, 12, 25)),
        ],
    )
    async with conn.transaction():
        # Postgres requires non-scrollable cursors to be created
        # and used in a transaction.
        cur = await conn.cursor(
            "SELECT * FROM users WHERE dob > $1", datetime.date(1970, 1, 1)
        )
        record = await cur.fetchrow()
        print(record)
        while await cur.forward(1):
            record = await cur.fetchrow()
            print(record)

    await conn.close()

    capture_message("hi")

    (event,) = events

    for crumb in event["breadcrumbs"]["values"]:
        del crumb["timestamp"]

    assert event["breadcrumbs"]["values"] == [
        CRUMBS_CONNECT,
        {
            "category": "query",
            "data": {"db.executemany": True},
            "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
            "type": "default",
        },
        {"category": "query", "data": {}, "message": "BEGIN;", "type": "default"},
        {
            "category": "query",
            "data": {},
            "message": "SELECT * FROM users WHERE dob > $1",
            "type": "default",
        },
        {"category": "query", "data": {}, "message": "COMMIT;", "type": "default"},
    ]


@pytest.mark.asyncio
async def test_prepared_stmt(sentry_init, capture_events) -> None:
    sentry_init(
        integrations=[AsyncPGIntegration()],
        _experiments={"record_sql_params": True},
    )
    events = capture_events()

    conn: Connection = await connect(PG_CONNECTION_URI)

    await conn.executemany(
        "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
        [
            ("Bob", "secret_pw", datetime.date(1984, 3, 1)),
            ("Alice", "pw", datetime.date(1990, 12, 25)),
        ],
    )

    stmt = await conn.prepare("SELECT * FROM users WHERE name = $1")

    print(await stmt.fetchval("Bob"))
    print(await stmt.fetchval("Alice"))

    await conn.close()

    capture_message("hi")

    (event,) = events

    for crumb in event["breadcrumbs"]["values"]:
        del crumb["timestamp"]

    assert event["breadcrumbs"]["values"] == [
        CRUMBS_CONNECT,
        {
            "category": "query",
            "data": {"db.executemany": True},
            "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
            "type": "default",
        },
        {
            "category": "query",
            "data": {},
            "message": "SELECT * FROM users WHERE name = $1",
            "type": "default",
        },
    ]


@pytest.mark.asyncio
async def test_connection_pool(sentry_init, capture_events) -> None:
    sentry_init(
        integrations=[AsyncPGIntegration()],
        _experiments={"record_sql_params": True},
    )
    events = capture_events()

    pool_size = 2

    pool = await asyncpg.create_pool(
        PG_CONNECTION_URI, min_size=pool_size, max_size=pool_size
    )

    async with pool.acquire() as conn:
        await conn.execute(
            "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
            "Bob",
            "secret_pw",
            datetime.date(1984, 3, 1),
        )

    async with pool.acquire() as conn:
        row = await conn.fetchrow("SELECT * FROM users WHERE name = $1", "Bob")
        assert row == (1, "Bob", "secret_pw", datetime.date(1984, 3, 1))

    await pool.close()

    capture_message("hi")

    (event,) = events

    for crumb in event["breadcrumbs"]["values"]:
        del crumb["timestamp"]

    assert event["breadcrumbs"]["values"] == [
        # The connection pool opens pool_size connections so we have the crumbs pool_size times
        *[CRUMBS_CONNECT] * pool_size,
        {
            "category": "query",
            "data": {},
            "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
            "type": "default",
        },
        {
            "category": "query",
            "data": {},
            "message": "SELECT pg_advisory_unlock_all();\n"
            "CLOSE ALL;\n"
            "UNLISTEN *;\n"
            "RESET ALL;",
            "type": "default",
        },
        {
            "category": "query",
            "data": {},
            "message": "SELECT * FROM users WHERE name = $1",
            "type": "default",
        },
        {
            "category": "query",
            "data": {},
            "message": "SELECT pg_advisory_unlock_all();\n"
            "CLOSE ALL;\n"
            "UNLISTEN *;\n"
            "RESET ALL;",
            "type": "default",
        },
    ]


@pytest.mark.asyncio
async def test_query_source_disabled(sentry_init, capture_events):
    sentry_options = {
        "integrations": [AsyncPGIntegration()],
        "enable_tracing": True,
        "enable_db_query_source": False,
        "db_query_source_threshold_ms": 0,
    }

    sentry_init(**sentry_options)

    events = capture_events()

    with start_transaction(name="test_transaction", sampled=True):
        conn: Connection = await connect(PG_CONNECTION_URI)

        await conn.execute(
            "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')",
        )

        await conn.close()

    (event,) = events

    span = event["spans"][-1]
    assert span["description"].startswith("INSERT INTO")

    data = span.get("data", {})

    assert SPANDATA.CODE_LINENO not in data
    assert SPANDATA.CODE_NAMESPACE not in data
    assert SPANDATA.CODE_FILEPATH not in data
    assert SPANDATA.CODE_FUNCTION not in data


@pytest.mark.asyncio
@pytest.mark.parametrize("enable_db_query_source", [None, True])
async def test_query_source_enabled(
    sentry_init, capture_events, enable_db_query_source
):
    sentry_options = {
        "integrations": [AsyncPGIntegration()],
        "enable_tracing": True,
        "db_query_source_threshold_ms": 0,
    }
    if enable_db_query_source is not None:
        sentry_options["enable_db_query_source"] = enable_db_query_source

    sentry_init(**sentry_options)

    events = capture_events()

    with start_transaction(name="test_transaction", sampled=True):
        conn: Connection = await connect(PG_CONNECTION_URI)

        await conn.execute(
            "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')",
        )

        await conn.close()

    (event,) = events

    span = event["spans"][-1]
    assert span["description"].startswith("INSERT INTO")

    data = span.get("data", {})

    assert SPANDATA.CODE_LINENO in data
    assert SPANDATA.CODE_NAMESPACE in data
    assert SPANDATA.CODE_FILEPATH in data
    assert SPANDATA.CODE_FUNCTION in data


@pytest.mark.asyncio
async def test_query_source(sentry_init, capture_events):
    sentry_init(
        integrations=[AsyncPGIntegration()],
        enable_tracing=True,
        enable_db_query_source=True,
        db_query_source_threshold_ms=0,
    )

    events = capture_events()

    with start_transaction(name="test_transaction", sampled=True):
        conn: Connection = await connect(PG_CONNECTION_URI)

        await conn.execute(
            "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')",
        )

        await conn.close()

    (event,) = events

    span = event["spans"][-1]
    assert span["description"].startswith("INSERT INTO")

    data = span.get("data", {})

    assert SPANDATA.CODE_LINENO in data
    assert SPANDATA.CODE_NAMESPACE in data
    assert SPANDATA.CODE_FILEPATH in data
    assert SPANDATA.CODE_FUNCTION in data

    assert type(data.get(SPANDATA.CODE_LINENO)) == int
    assert data.get(SPANDATA.CODE_LINENO) > 0
    assert (
        data.get(SPANDATA.CODE_NAMESPACE) == "tests.integrations.asyncpg.test_asyncpg"
    )
    assert data.get(SPANDATA.CODE_FILEPATH).endswith(
        "tests/integrations/asyncpg/test_asyncpg.py"
    )

    is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
    assert is_relative_path

    assert data.get(SPANDATA.CODE_FUNCTION) == "test_query_source"


@pytest.mark.asyncio
async def test_query_source_with_module_in_search_path(sentry_init, capture_events):
    """
    Test that query source is relative to the path of the module it ran in
    """
    sentry_init(
        integrations=[AsyncPGIntegration()],
        enable_tracing=True,
        enable_db_query_source=True,
        db_query_source_threshold_ms=0,
    )

    events = capture_events()

    from asyncpg_helpers.helpers import execute_query_in_connection

    with start_transaction(name="test_transaction", sampled=True):
        conn: Connection = await connect(PG_CONNECTION_URI)

        await execute_query_in_connection(
            "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')",
            conn,
        )

        await conn.close()

    (event,) = events

    span = event["spans"][-1]
    assert span["description"].startswith("INSERT INTO")

    data = span.get("data", {})

    assert SPANDATA.CODE_LINENO in data
    assert SPANDATA.CODE_NAMESPACE in data
    assert SPANDATA.CODE_FILEPATH in data
    assert SPANDATA.CODE_FUNCTION in data

    assert type(data.get(SPANDATA.CODE_LINENO)) == int
    assert data.get(SPANDATA.CODE_LINENO) > 0
    assert data.get(SPANDATA.CODE_NAMESPACE) == "asyncpg_helpers.helpers"
    assert data.get(SPANDATA.CODE_FILEPATH) == "asyncpg_helpers/helpers.py"

    is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
    assert is_relative_path

    assert data.get(SPANDATA.CODE_FUNCTION) == "execute_query_in_connection"


@pytest.mark.asyncio
async def test_no_query_source_if_duration_too_short(sentry_init, capture_events):
    sentry_init(
        integrations=[AsyncPGIntegration()],
        enable_tracing=True,
        enable_db_query_source=True,
        db_query_source_threshold_ms=100,
    )

    events = capture_events()

    with start_transaction(name="test_transaction", sampled=True):
        conn: Connection = await connect(PG_CONNECTION_URI)

        @contextmanager
        def fake_record_sql_queries(*args, **kwargs):
            with record_sql_queries(*args, **kwargs) as span:
                pass
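            # Force a recorded duration of 99.999 ms, just under the 100 ms
            # db_query_source_threshold_ms configured above, so no query source
            # data should be attached to the span.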
            span.start_timestamp = datetime.datetime(2024, 1, 1, microsecond=0)
            span.timestamp = datetime.datetime(2024, 1, 1, microsecond=99999)
            yield span

        with mock.patch(
            "sentry_sdk.integrations.asyncpg.record_sql_queries",
            fake_record_sql_queries,
        ):
            await conn.execute(
                "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')",
            )

        await conn.close()

    (event,) = events

    span = event["spans"][-1]
    assert span["description"].startswith("INSERT INTO")

    data = span.get("data", {})

    assert SPANDATA.CODE_LINENO not in data
    assert SPANDATA.CODE_NAMESPACE not in data
    assert SPANDATA.CODE_FILEPATH not in data
    assert SPANDATA.CODE_FUNCTION not in data


@pytest.mark.asyncio
async def test_query_source_if_duration_over_threshold(sentry_init, capture_events):
    sentry_init(
        integrations=[AsyncPGIntegration()],
        enable_tracing=True,
        enable_db_query_source=True,
        db_query_source_threshold_ms=100,
    )

    events = capture_events()

    with start_transaction(name="test_transaction", sampled=True):
        conn: Connection = await connect(PG_CONNECTION_URI)

        @contextmanager
        def fake_record_sql_queries(*args, **kwargs):
            with record_sql_queries(*args, **kwargs) as span:
                pass
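            # Force a recorded duration of 100.001 ms, just over the 100 ms
            # db_query_source_threshold_ms configured above, so query source
            # data should be attached to the span.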
            span.start_timestamp = datetime.datetime(2024, 1, 1, microsecond=0)
            span.timestamp = datetime.datetime(2024, 1, 1, microsecond=100001)
            yield span

        with mock.patch(
            "sentry_sdk.integrations.asyncpg.record_sql_queries",
            fake_record_sql_queries,
        ):
            await conn.execute(
                "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')",
            )

        await conn.close()

    (event,) = events

    span = event["spans"][-1]
    assert span["description"].startswith("INSERT INTO")

    data = span.get("data", {})

    assert SPANDATA.CODE_LINENO in data
    assert SPANDATA.CODE_NAMESPACE in data
    assert SPANDATA.CODE_FILEPATH in data
    assert SPANDATA.CODE_FUNCTION in data

    assert type(data.get(SPANDATA.CODE_LINENO)) == int
    assert data.get(SPANDATA.CODE_LINENO) > 0
    assert (
        data.get(SPANDATA.CODE_NAMESPACE) == "tests.integrations.asyncpg.test_asyncpg"
    )
    assert data.get(SPANDATA.CODE_FILEPATH).endswith(
        "tests/integrations/asyncpg/test_asyncpg.py"
    )

    is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
    assert is_relative_path

    assert (
        data.get(SPANDATA.CODE_FUNCTION)
        == "test_query_source_if_duration_over_threshold"
    )


@pytest.mark.asyncio
async def test_span_origin(sentry_init, capture_events):
    sentry_init(
        integrations=[AsyncPGIntegration()],
        traces_sample_rate=1.0,
    )

    events = capture_events()

    with start_transaction(name="test_transaction"):
        conn: Connection = await connect(PG_CONNECTION_URI)

        await conn.execute("SELECT 1")
        await conn.fetchrow("SELECT 2")
        await conn.close()

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"

    for span in event["spans"]:
        assert span["origin"] == "auto.db.asyncpg"
sentry-python-2.18.0/tests/integrations/aws_lambda/000077500000000000000000000000001471214654000224135ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/aws_lambda/__init__.py000066400000000000000000000000541471214654000245230ustar00rootroot00000000000000import pytest

pytest.importorskip("boto3")
sentry-python-2.18.0/tests/integrations/aws_lambda/client.py000066400000000000000000000303421471214654000242450ustar00rootroot00000000000000import base64
import boto3
import glob
import hashlib
import os
import subprocess
import sys
import tempfile

from sentry_sdk.consts import VERSION as SDK_VERSION
from sentry_sdk.utils import get_git_revision

AWS_REGION_NAME = "us-east-1"
AWS_CREDENTIALS = {
    "aws_access_key_id": os.environ["SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID"],
    "aws_secret_access_key": os.environ["SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY"],
}
AWS_LAMBDA_EXECUTION_ROLE_NAME = "lambda-ex"
AWS_LAMBDA_EXECUTION_ROLE_ARN = None


def _install_dependencies(base_dir, subprocess_kwargs):
    """
    Installs the dependencies for the AWS Lambda function package.
    """
    setup_cfg = os.path.join(base_dir, "setup.cfg")
    with open(setup_cfg, "w") as f:
        f.write("[install]\nprefix=")

    # Install requirements for Lambda Layer (these are more limited than the SDK requirements,
    # because Lambda does not support the newest versions of some packages)
    subprocess.check_call(
        [
            sys.executable,
            "-m",
            "pip",
            "install",
            "-r",
            "requirements-aws-lambda-layer.txt",
            "--target",
            base_dir,
        ],
        **subprocess_kwargs,
    )
    # Install requirements used for testing
    subprocess.check_call(
        [
            sys.executable,
            "-m",
            "pip",
            "install",
            "mock==3.0.0",
            "funcsigs",
            "--target",
            base_dir,
        ],
        **subprocess_kwargs,
    )
    # Create a source distribution of the Sentry SDK (in parent directory of base_dir)
    subprocess.check_call(
        [
            sys.executable,
            "setup.py",
            "sdist",
            "--dist-dir",
            os.path.dirname(base_dir),
        ],
        **subprocess_kwargs,
    )
    # Install the created Sentry SDK source distribution into the target directory.
    # Do not install the dependencies of the SDK, because they were installed by
    # requirements-aws-lambda-layer.txt above.
    source_distribution_archive = glob.glob(
        "{}/*.tar.gz".format(os.path.dirname(base_dir))
    )[0]
    subprocess.check_call(
        [
            sys.executable,
            "-m",
            "pip",
            "install",
            source_distribution_archive,
            "--no-deps",
            "--target",
            base_dir,
        ],
        **subprocess_kwargs,
    )


def _create_lambda_function_zip(base_dir):
    """
    Zips the given base_dir, omitting Python cache files.
    """
    subprocess.run(
        [
            "zip",
            "-q",
            "-x",
            "**/__pycache__/*",
            "-r",
            "lambda-function-package.zip",
            "./",
        ],
        cwd=base_dir,
        check=True,
    )


def _create_lambda_package(
    base_dir, code, initial_handler, layer, syntax_check, subprocess_kwargs
):
    """
    Creates deployable packages (as zip files) for the AWS Lambda function
    and, optionally, the accompanying Sentry Lambda layer.
    """
    if initial_handler:
        # If an initial handler value is provided, i.e. it is not the default
        # `test_lambda.test_handler`, then create another directory level so that
        # our path is test_dir.test_lambda.test_handler.
        test_dir_path = os.path.join(base_dir, "test_dir")
        python_init_file = os.path.join(test_dir_path, "__init__.py")
        os.makedirs(test_dir_path)
        with open(python_init_file, "w"):
            # Create __init__ file to make it a python package
            pass

        test_lambda_py = os.path.join(base_dir, "test_dir", "test_lambda.py")
    else:
        test_lambda_py = os.path.join(base_dir, "test_lambda.py")

    with open(test_lambda_py, "w") as f:
        f.write(code)

    if syntax_check:
        # Check file for valid syntax first, and that the integration does not
        # crash when not running in Lambda (but rather a local deployment tool
        # such as chalice's)
        subprocess.check_call([sys.executable, test_lambda_py])

    if layer is None:
        _install_dependencies(base_dir, subprocess_kwargs)
        _create_lambda_function_zip(base_dir)

    else:
        _create_lambda_function_zip(base_dir)

        # Create Lambda layer zip package
        from scripts.build_aws_lambda_layer import build_packaged_zip

        build_packaged_zip(
            base_dir=base_dir,
            make_dist=True,
            out_zip_filename="lambda-layer-package.zip",
        )


def _get_or_create_lambda_execution_role():
    global AWS_LAMBDA_EXECUTION_ROLE_ARN

    policy = """{
        "Version": "2012-10-17",
        "Statement": [
            {
                "Effect": "Allow",
                "Principal": {
                    "Service": "lambda.amazonaws.com"
                },
                "Action": "sts:AssumeRole"
            }
        ]
    }
    """
    iam_client = boto3.client(
        "iam",
        region_name=AWS_REGION_NAME,
        **AWS_CREDENTIALS,
    )

    try:
        response = iam_client.get_role(RoleName=AWS_LAMBDA_EXECUTION_ROLE_NAME)
        AWS_LAMBDA_EXECUTION_ROLE_ARN = response["Role"]["Arn"]
    except iam_client.exceptions.NoSuchEntityException:
        # create role for lambda execution
        response = iam_client.create_role(
            RoleName=AWS_LAMBDA_EXECUTION_ROLE_NAME,
            AssumeRolePolicyDocument=policy,
        )
        AWS_LAMBDA_EXECUTION_ROLE_ARN = response["Role"]["Arn"]

        # attach policy to role
        iam_client.attach_role_policy(
            RoleName=AWS_LAMBDA_EXECUTION_ROLE_NAME,
            PolicyArn="arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole",
        )


def get_boto_client():
    _get_or_create_lambda_execution_role()

    return boto3.client(
        "lambda",
        region_name=AWS_REGION_NAME,
        **AWS_CREDENTIALS,
    )


def run_lambda_function(
    client,
    runtime,
    code,
    payload,
    add_finalizer,
    syntax_check=True,
    timeout=30,
    layer=None,
    initial_handler=None,
    subprocess_kwargs=(),
):
    """
    Creates a Lambda function with the given code, and invokes it.

    If the same code is run multiple times, the function will NOT be created
    anew each time; instead, the existing function will be reused.
    """
    subprocess_kwargs = dict(subprocess_kwargs)

    # Build a unique function name derived from all the code that runs in it
    # (function code plus SDK version). The name needs to be short so the
    # generated event/envelope JSON blobs are small enough to be output in the
    # log result of the Lambda function.
    rev = get_git_revision() or SDK_VERSION
    function_hash = hashlib.shake_256((code + rev).encode("utf-8")).hexdigest(6)
    fn_name = "test_{}".format(function_hash)
    full_fn_name = "{}_{}".format(
        fn_name, runtime.replace(".", "").replace("python", "py")
    )
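    # Example (hash value illustrative): a code hash of "1a2b3c4d5e6f" on
    # runtime "python3.11" yields full_fn_name "test_1a2b3c4d5e6f_py311".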

    function_exists_in_aws = True
    try:
        client.get_function(
            FunctionName=full_fn_name,
        )
        print(
            "Lambda function already exists in AWS, reusing it (and not creating a local one)"
        )
    except client.exceptions.ResourceNotFoundException:
        function_exists_in_aws = False

    if not function_exists_in_aws:
        tmp_base_dir = tempfile.gettempdir()
        base_dir = os.path.join(tmp_base_dir, fn_name)
        dir_already_existing = os.path.isdir(base_dir)

        if dir_already_existing:
            print("Local Lambda function directory already exists, skipping creation")

        if not dir_already_existing:
            os.mkdir(base_dir)
            _create_lambda_package(
                base_dir, code, initial_handler, layer, syntax_check, subprocess_kwargs
            )

            @add_finalizer
            def clean_up():
                # this closes the web socket so we don't get a
                #   ResourceWarning: unclosed <ssl.SSLSocket ...>
                # warning on every test
                # based on https://github.com/boto/botocore/pull/1810
                # (if that's ever merged, this can just become client.close())
                session = client._endpoint.http_session
                managers = [session._manager] + list(session._proxy_managers.values())
                for manager in managers:
                    manager.clear()

        layers = []
        environment = {}
        handler = initial_handler or "test_lambda.test_handler"

        if layer is not None:
            with open(
                os.path.join(base_dir, "lambda-layer-package.zip"), "rb"
            ) as lambda_layer_zip:
                response = client.publish_layer_version(
                    LayerName="python-serverless-sdk-test",
                    Description="Created as part of testsuite for getsentry/sentry-python",
                    Content={"ZipFile": lambda_layer_zip.read()},
                )

            layers = [response["LayerVersionArn"]]
            handler = (
                "sentry_sdk.integrations.init_serverless_sdk.sentry_lambda_handler"
            )
            environment = {
                "Variables": {
                    "SENTRY_INITIAL_HANDLER": initial_handler
                    or "test_lambda.test_handler",
                    "SENTRY_DSN": "https://123abc@example.com/123",
                    "SENTRY_TRACES_SAMPLE_RATE": "1.0",
                }
            }

        try:
            with open(
                os.path.join(base_dir, "lambda-function-package.zip"), "rb"
            ) as lambda_function_zip:
                client.create_function(
                    Description="Created as part of testsuite for getsentry/sentry-python",
                    FunctionName=full_fn_name,
                    Runtime=runtime,
                    Timeout=timeout,
                    Role=AWS_LAMBDA_EXECUTION_ROLE_ARN,
                    Handler=handler,
                    Code={"ZipFile": lambda_function_zip.read()},
                    Environment=environment,
                    Layers=layers,
                )

                waiter = client.get_waiter("function_active_v2")
                waiter.wait(FunctionName=full_fn_name)
        except client.exceptions.ResourceConflictException:
            print(
                "Lambda function already exists, this is fine, we will just invoke it."
            )

    response = client.invoke(
        FunctionName=full_fn_name,
        InvocationType="RequestResponse",
        LogType="Tail",
        Payload=payload,
    )

    assert 200 <= response["StatusCode"] < 300, response
    return response


# This is for inspecting new Python runtime environments in AWS Lambda
# If you need to debug a new runtime, use this REPL to run arbitrary Python or bash commands
# in that runtime in a Lambda function:
#
#    pip3 install click
#    python3 tests/integrations/aws_lambda/client.py --runtime=python4.0
#


_REPL_CODE = """
import os

def test_handler(event, context):
    line = {line!r}
    if line.startswith(">>> "):
        exec(line[4:])
    elif line.startswith("$ "):
        os.system(line[2:])
    else:
        print("Start a line with $ or >>>")

    return b""
"""

try:
    import click
except ImportError:
    pass
else:

    @click.command()
    @click.option(
        "--runtime", required=True, help="name of the runtime to use, eg python3.11"
    )
    @click.option("--verbose", is_flag=True, default=False)
    def repl(runtime, verbose):
        """
        Launch a "REPL" against AWS Lambda to inspect their runtime.
        """

        cleanup = []
        client = get_boto_client()

        print("Start a line with `$ ` to run shell commands, or `>>> ` to run Python")

        while True:
            line = input()

            response = run_lambda_function(
                client,
                runtime,
                _REPL_CODE.format(line=line),
                b"",
                cleanup.append,
                subprocess_kwargs=(
                    {
                        "stdout": subprocess.DEVNULL,
                        "stderr": subprocess.DEVNULL,
                    }
                    if not verbose
                    else {}
                ),
            )

            for line in base64.b64decode(response["LogResult"]).splitlines():
                print(line.decode("utf8"))

            for f in cleanup:
                f()

            cleanup = []

    if __name__ == "__main__":
        repl()
sentry-python-2.18.0/tests/integrations/aws_lambda/test_aws.py000066400000000000000000000706641471214654000246330ustar00rootroot00000000000000"""
# AWS Lambda System Tests

This testsuite uses boto3 to upload actual Lambda functions to AWS Lambda and invoke them.

To run the tests locally, you need to set these env vars:
(You can find the values in the Sentry password manager by searching for "AWS Lambda for Python SDK Tests").

    export SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID="..."
    export SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY="..."


You can use `scripts/aws-cleanup.sh` to delete all files generated by this test suite.


If you need to debug a new runtime, use this REPL to run arbitrary Python or bash commands
in that runtime in a Lambda function: (see the bottom of client.py for more information.)

    pip3 install click
    python3 tests/integrations/aws_lambda/client.py --runtime=python4.0

IMPORTANT:

While this test suite runs, temporary folders are created for compiling the Lambda functions.
These temporary folders are not cleaned up. This is because in CI the generated files have to be
shared between tests, so the folders cannot be deleted right after use.

If you run your tests locally, you need to clean up the temporary folders manually. The location of
the temporary folders is printed when running a test.
"""

import base64
import json
import re
from textwrap import dedent

import pytest

RUNTIMES_TO_TEST = [
    "python3.8",
    "python3.9",
    "python3.10",
    "python3.11",
    "python3.12",
]

LAMBDA_PRELUDE = """
from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration, get_lambda_bootstrap
import sentry_sdk
import json
import time

from sentry_sdk.transport import Transport

def truncate_data(data):
    # AWS Lambda truncates the log output to 4kb, which is small enough to miss
    # parts of even a single error-event/transaction-envelope pair if considered
    # in full, so only grab the data we need.

    cleaned_data = {}

    if data.get("type") is not None:
        cleaned_data["type"] = data["type"]

    if data.get("contexts") is not None:
        cleaned_data["contexts"] = {}

        if data["contexts"].get("trace") is not None:
            cleaned_data["contexts"]["trace"] = data["contexts"].get("trace")

    if data.get("transaction") is not None:
        cleaned_data["transaction"] = data.get("transaction")

    if data.get("request") is not None:
        cleaned_data["request"] = data.get("request")

    if data.get("tags") is not None:
        cleaned_data["tags"] = data.get("tags")

    if data.get("exception") is not None:
        cleaned_data["exception"] = data.get("exception")

        for value in cleaned_data["exception"]["values"]:
            for frame in value.get("stacktrace", {}).get("frames", []):
                del frame["vars"]
                del frame["pre_context"]
                del frame["context_line"]
                del frame["post_context"]

    if data.get("extra") is not None:
        cleaned_data["extra"] = {}

        for key in data["extra"].keys():
            if key == "lambda":
                for lambda_key in data["extra"]["lambda"].keys():
                    if lambda_key in ["function_name"]:
                        cleaned_data["extra"].setdefault("lambda", {})[lambda_key] = data["extra"]["lambda"][lambda_key]
            elif key == "cloudwatch logs":
                for cloudwatch_key in data["extra"]["cloudwatch logs"].keys():
                    if cloudwatch_key in ["url", "log_group", "log_stream"]:
                        cleaned_data["extra"].setdefault("cloudwatch logs", {})[cloudwatch_key] = data["extra"]["cloudwatch logs"][cloudwatch_key]

    if data.get("level") is not None:
        cleaned_data["level"] = data.get("level")

    if data.get("message") is not None:
        cleaned_data["message"] = data.get("message")

    if "contexts" not in cleaned_data:
        raise Exception(json.dumps(data))

    return cleaned_data

def event_processor(event):
    return truncate_data(event)

def envelope_processor(envelope):
    (item,) = envelope.items
    item_json = json.loads(item.get_bytes())

    return truncate_data(item_json)


class TestTransport(Transport):
    def capture_envelope(self, envelope):
        envelope_items = envelope_processor(envelope)
        print("\\nENVELOPE: {}\\n".format(json.dumps(envelope_items)))

def init_sdk(timeout_warning=False, **extra_init_args):
    sentry_sdk.init(
        dsn="https://123abc@example.com/123",
        transport=TestTransport,
        integrations=[AwsLambdaIntegration(timeout_warning=timeout_warning)],
        shutdown_timeout=10,
        **extra_init_args
    )
"""


@pytest.fixture
def lambda_client():
    from tests.integrations.aws_lambda.client import get_boto_client

    return get_boto_client()


@pytest.fixture(params=RUNTIMES_TO_TEST)
def lambda_runtime(request):
    return request.param


@pytest.fixture
def run_lambda_function(request, lambda_client, lambda_runtime):
    def inner(
        code, payload, timeout=30, syntax_check=True, layer=None, initial_handler=None
    ):
        from tests.integrations.aws_lambda.client import run_lambda_function

        response = run_lambda_function(
            client=lambda_client,
            runtime=lambda_runtime,
            code=code,
            payload=payload,
            add_finalizer=request.addfinalizer,
            timeout=timeout,
            syntax_check=syntax_check,
            layer=layer,
            initial_handler=initial_handler,
        )

        # Make sure the "ENVELOPE:" and "EVENT:" log entries always start on a new line. (Sometimes they don't.)
        response["LogResult"] = (
            base64.b64decode(response["LogResult"])
            .replace(b"EVENT:", b"\nEVENT:")
            .replace(b"ENVELOPE:", b"\nENVELOPE:")
            .splitlines()
        )
        response["Payload"] = json.loads(response["Payload"].read().decode("utf-8"))
        del response["ResponseMetadata"]

        envelope_items = []

        for line in response["LogResult"]:
            print("AWS:", line)
            if line.startswith(b"ENVELOPE: "):
                line = line[len(b"ENVELOPE: ") :]
                envelope_items.append(json.loads(line.decode("utf-8")))
            else:
                continue

        return envelope_items, response

    return inner


def test_basic(run_lambda_function):
    envelope_items, response = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk()

        def test_handler(event, context):
            raise Exception("Oh!")
        """
        ),
        b'{"foo": "bar"}',
    )

    assert response["FunctionError"] == "Unhandled"

    (event,) = envelope_items
    assert event["level"] == "error"
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "Exception"
    assert exception["value"] == "Oh!"

    (frame1,) = exception["stacktrace"]["frames"]
    assert frame1["filename"] == "test_lambda.py"
    assert frame1["abs_path"] == "/var/task/test_lambda.py"
    assert frame1["function"] == "test_handler"

    assert frame1["in_app"] is True

    assert exception["mechanism"]["type"] == "aws_lambda"
    assert not exception["mechanism"]["handled"]

    assert event["extra"]["lambda"]["function_name"].startswith("test_")

    logs_url = event["extra"]["cloudwatch logs"]["url"]
    assert logs_url.startswith("https://console.aws.amazon.com/cloudwatch/home?region=")
    assert not re.search("(=;|=$)", logs_url)
    assert event["extra"]["cloudwatch logs"]["log_group"].startswith(
        "/aws/lambda/test_"
    )

    log_stream_re = "^[0-9]{4}/[0-9]{2}/[0-9]{2}/\\[[^\\]]+][a-f0-9]+$"
    log_stream = event["extra"]["cloudwatch logs"]["log_stream"]

    assert re.match(log_stream_re, log_stream)


def test_initialization_order(run_lambda_function):
    """Zappa lazily imports our code, so by the time we monkeypatch the handler
    as seen by AWS already runs. At this point at least draining the queue
    should work."""

    envelope_items, _ = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
            def test_handler(event, context):
                init_sdk()
                sentry_sdk.capture_exception(Exception("Oh!"))
        """
        ),
        b'{"foo": "bar"}',
    )

    (event,) = envelope_items

    assert event["level"] == "error"
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "Exception"
    assert exception["value"] == "Oh!"


def test_request_data(run_lambda_function):
    envelope_items, _ = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk()
        def test_handler(event, context):
            sentry_sdk.capture_message("hi")
            return "ok"
        """
        ),
        payload=b"""
        {
          "resource": "/asd",
          "path": "/asd",
          "httpMethod": "GET",
          "headers": {
            "Host": "iwsz2c7uwi.execute-api.us-east-1.amazonaws.com",
            "User-Agent": "custom",
            "X-Forwarded-Proto": "https"
          },
          "queryStringParameters": {
            "bonkers": "true"
          },
          "pathParameters": null,
          "stageVariables": null,
          "requestContext": {
            "identity": {
              "sourceIp": "213.47.147.207",
              "userArn": "42"
            }
          },
          "body": null,
          "isBase64Encoded": false
        }
        """,
    )

    (event,) = envelope_items

    assert event["request"] == {
        "headers": {
            "Host": "iwsz2c7uwi.execute-api.us-east-1.amazonaws.com",
            "User-Agent": "custom",
            "X-Forwarded-Proto": "https",
        },
        "method": "GET",
        "query_string": {"bonkers": "true"},
        "url": "https://iwsz2c7uwi.execute-api.us-east-1.amazonaws.com/asd",
    }


@pytest.mark.xfail(
    reason="Amazon changed something (2024-10-01) and on Python 3.9+ our SDK can not capture events in the init phase of the Lambda function anymore. We need to fix this somehow."
)
def test_init_error(run_lambda_function, lambda_runtime):
    envelope_items, _ = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk()
        func()
        """
        ),
        b'{"foo": "bar"}',
        syntax_check=False,
    )

    # We just take the last one, because the output of this Lambda invocation
    # may still contain the envelope of a previous invocation of the function.
    event = envelope_items[-1]
    assert event["exception"]["values"][0]["value"] == "name 'func' is not defined"


def test_timeout_error(run_lambda_function):
    envelope_items, _ = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk(timeout_warning=True)

        def test_handler(event, context):
            time.sleep(10)
            return 0
        """
        ),
        b'{"foo": "bar"}',
        timeout=2,
    )

    (event,) = envelope_items
    assert event["level"] == "error"
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ServerlessTimeoutWarning"
    assert exception["value"] in (
        "WARNING : Function is expected to get timed out. Configured timeout duration = 3 seconds.",
        "WARNING : Function is expected to get timed out. Configured timeout duration = 2 seconds.",
    )

    assert exception["mechanism"]["type"] == "threading"
    assert not exception["mechanism"]["handled"]

    assert event["extra"]["lambda"]["function_name"].startswith("test_")

    logs_url = event["extra"]["cloudwatch logs"]["url"]
    assert logs_url.startswith("https://console.aws.amazon.com/cloudwatch/home?region=")
    assert not re.search("(=;|=$)", logs_url)
    assert event["extra"]["cloudwatch logs"]["log_group"].startswith(
        "/aws/lambda/test_"
    )

    log_stream_re = "^[0-9]{4}/[0-9]{2}/[0-9]{2}/\\[[^\\]]+][a-f0-9]+$"
    log_stream = event["extra"]["cloudwatch logs"]["log_stream"]

    assert re.match(log_stream_re, log_stream)


def test_performance_no_error(run_lambda_function):
    envelope_items, _ = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=1.0)

        def test_handler(event, context):
            return "test_string"
        """
        ),
        b'{"foo": "bar"}',
    )

    (envelope,) = envelope_items

    assert envelope["type"] == "transaction"
    assert envelope["contexts"]["trace"]["op"] == "function.aws"
    assert envelope["transaction"].startswith("test_")
    assert envelope["transaction"] in envelope["request"]["url"]


def test_performance_error(run_lambda_function):
    envelope_items, _ = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=1.0)

        def test_handler(event, context):
            raise Exception("Oh!")
        """
        ),
        b'{"foo": "bar"}',
    )

    (
        error_event,
        transaction_event,
    ) = envelope_items

    assert error_event["level"] == "error"
    (exception,) = error_event["exception"]["values"]
    assert exception["type"] == "Exception"
    assert exception["value"] == "Oh!"

    assert transaction_event["type"] == "transaction"
    assert transaction_event["contexts"]["trace"]["op"] == "function.aws"
    assert transaction_event["transaction"].startswith("test_")
    assert transaction_event["transaction"] in transaction_event["request"]["url"]


@pytest.mark.parametrize(
    "aws_event, has_request_data, batch_size",
    [
        (b"1231", False, 1),
        (b"11.21", False, 1),
        (b'"Good dog!"', False, 1),
        (b"true", False, 1),
        (
            b"""
            [
                {"good dog": "Maisey"},
                {"good dog": "Charlie"},
                {"good dog": "Cory"},
                {"good dog": "Bodhi"}
            ]
            """,
            False,
            4,
        ),
        (
            b"""
            [
                {
                    "headers": {
                        "Host": "x1.io",
                        "X-Forwarded-Proto": "https"
                    },
                    "httpMethod": "GET",
                    "path": "/path1",
                    "queryStringParameters": {
                        "done": "false"
                    },
                    "dog": "Maisey"
                },
                {
                    "headers": {
                        "Host": "x2.io",
                        "X-Forwarded-Proto": "http"
                    },
                    "httpMethod": "POST",
                    "path": "/path2",
                    "queryStringParameters": {
                        "done": "true"
                    },
                    "dog": "Charlie"
                }
            ]
            """,
            True,
            2,
        ),
        (b"[]", False, 1),
    ],
)
def test_non_dict_event(
    run_lambda_function,
    aws_event,
    has_request_data,
    batch_size,
    DictionaryContaining,  # noqa:N803
):
    envelope_items, response = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=1.0)

        def test_handler(event, context):
            raise Exception("Oh?")
        """
        ),
        aws_event,
    )

    assert response["FunctionError"] == "Unhandled"

    (
        error_event,
        transaction_event,
    ) = envelope_items
    assert error_event["level"] == "error"
    assert error_event["contexts"]["trace"]["op"] == "function.aws"

    function_name = error_event["extra"]["lambda"]["function_name"]
    assert function_name.startswith("test_")
    assert error_event["transaction"] == function_name

    exception = error_event["exception"]["values"][0]
    assert exception["type"] == "Exception"
    assert exception["value"] == "Oh?"
    assert exception["mechanism"]["type"] == "aws_lambda"

    assert transaction_event["type"] == "transaction"
    assert transaction_event["contexts"]["trace"] == DictionaryContaining(
        error_event["contexts"]["trace"]
    )
    assert transaction_event["contexts"]["trace"]["status"] == "internal_error"
    assert transaction_event["transaction"] == error_event["transaction"]
    assert transaction_event["request"]["url"] == error_event["request"]["url"]

    if has_request_data:
        request_data = {
            "headers": {"Host": "x1.io", "X-Forwarded-Proto": "https"},
            "method": "GET",
            "url": "https://x1.io/path1",
            "query_string": {
                "done": "false",
            },
        }
    else:
        request_data = {"url": "awslambda:///{}".format(function_name)}

    assert error_event["request"] == request_data
    assert transaction_event["request"] == request_data

    if batch_size > 1:
        assert error_event["tags"]["batch_size"] == batch_size
        assert error_event["tags"]["batch_request"] is True
        assert transaction_event["tags"]["batch_size"] == batch_size
        assert transaction_event["tags"]["batch_request"] is True


def test_traces_sampler_gets_correct_values_in_sampling_context(
    run_lambda_function,
    DictionaryContaining,  # noqa: N803
    ObjectDescribedBy,  # noqa: N803
    StringContaining,  # noqa: N803
):
    # TODO: This whole thing is a little hacky, specifically around the need to
    # get `conftest.py` code into the AWS runtime, which is why there's both
    # `inspect.getsource` and a copy of `_safe_is_equal` included directly in
    # the code below. Ideas which have been discussed to fix this:

    # - Include the test suite as a module installed in the package which is
    #   shot up to AWS
    # - In client.py, copy `conftest.py` (or wherever the necessary code lives)
    #   from the test suite into the main SDK directory so it gets included as
    #   "part of the SDK"

    # It's also worth noting why it's necessary to run the assertions in the AWS
    # runtime rather than asserting on side effects the way we do with events
    # and envelopes. The reasons are two-fold:

    # - We're testing against the `LambdaContext` class, which only exists in
    #   the AWS runtime
    # - If we were to transmit call args data they way we transmit event and
    #   envelope data (through JSON), we'd quickly run into the problem that all
    #   sorts of stuff isn't serializable by `json.dumps` out of the box, up to
    #   and including `datetime` objects (so anything with a timestamp is
    #   automatically out)
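    #   (For illustration: json.dumps(datetime.datetime.now()) raises
    #   "TypeError: Object of type datetime is not JSON serializable".)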

    # Perhaps these challenges can be solved in a cleaner and more systematic
    # way if we ever decide to refactor the entire AWS testing apparatus.

    import inspect

    _, response = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(inspect.getsource(StringContaining))
        + dedent(inspect.getsource(DictionaryContaining))
        + dedent(inspect.getsource(ObjectDescribedBy))
        + dedent(
            """
            from unittest import mock

            def _safe_is_equal(x, y):
                # copied from conftest.py - see docstring and comments there
                try:
                    is_equal = x.__eq__(y)
                except AttributeError:
                    is_equal = NotImplemented

                if is_equal == NotImplemented:
                    # using == smoothes out weird variations exposed by raw __eq__
                    return x == y

                return is_equal

            def test_handler(event, context):
                # this runs after the transaction has started, which means we
                # can make assertions about traces_sampler
                try:
                    traces_sampler.assert_any_call(
                        DictionaryContaining(
                            {
                                "aws_event": DictionaryContaining({
                                    "httpMethod": "GET",
                                    "path": "/sit/stay/rollover",
                                    "headers": {"Host": "x.io", "X-Forwarded-Proto": "http"},
                                }),
                                "aws_context": ObjectDescribedBy(
                                    type=get_lambda_bootstrap().LambdaContext,
                                    attrs={
                                        'function_name': StringContaining("test_"),
                                        'function_version': '$LATEST',
                                    }
                                )
                            }
                        )
                    )
                except AssertionError:
                    # catch the error and return it because the error itself will
                    # get swallowed by the SDK as an "internal exception"
                    return {"AssertionError raised": True,}

                return {"AssertionError raised": False,}


            traces_sampler = mock.Mock(return_value=True)

            init_sdk(
                traces_sampler=traces_sampler,
            )
        """
        ),
        b'{"httpMethod": "GET", "path": "/sit/stay/rollover", "headers": {"Host": "x.io", "X-Forwarded-Proto": "http"}}',
    )

    assert response["Payload"]["AssertionError raised"] is False


@pytest.mark.xfail(
    reason="The limited log output we depend on is being clogged by a new warning"
)
def test_serverless_no_code_instrumentation(run_lambda_function):
    """
    Test that ensures that, just by adding a Lambda layer containing the
    Python SDK and without any code changes, Sentry is able to capture errors.
    """

    for initial_handler in [
        None,
        "test_dir/test_lambda.test_handler",
        "test_dir.test_lambda.test_handler",
    ]:
        print("Testing Initial Handler ", initial_handler)
        _, response = run_lambda_function(
            dedent(
                """
            import sentry_sdk

            def test_handler(event, context):
                current_client = sentry_sdk.get_client()

                assert current_client.is_active()

                assert len(current_client.options['integrations']) == 1
                assert isinstance(current_client.options['integrations'][0],
                                  sentry_sdk.integrations.aws_lambda.AwsLambdaIntegration)

                raise Exception("Oh!")
            """
            ),
            b'{"foo": "bar"}',
            layer=True,
            initial_handler=initial_handler,
        )
        assert response["FunctionError"] == "Unhandled"
        assert response["StatusCode"] == 200

        assert response["Payload"]["errorType"] != "AssertionError"

        assert response["Payload"]["errorType"] == "Exception"
        assert response["Payload"]["errorMessage"] == "Oh!"

        assert "sentry_handler" in response["LogResult"][3].decode("utf-8")


@pytest.mark.xfail(
    reason="The limited log output we depend on is being clogged by a new warning"
)
def test_error_has_new_trace_context_performance_enabled(run_lambda_function):
    envelope_items, _ = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=1.0)

        def test_handler(event, context):
            sentry_sdk.capture_message("hi")
            raise Exception("Oh!")
        """
        ),
        payload=b'{"foo": "bar"}',
    )

    (msg_event, error_event, transaction_event) = envelope_items

    assert "trace" in msg_event["contexts"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert "trace" in error_event["contexts"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert "trace" in transaction_event["contexts"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
        == transaction_event["contexts"]["trace"]["trace_id"]
    )


def test_error_has_new_trace_context_performance_disabled(run_lambda_function):
    envelope_items, _ = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=None) # this is the default, just added for clarity

        def test_handler(event, context):
            sentry_sdk.capture_message("hi")
            raise Exception("Oh!")
        """
        ),
        payload=b'{"foo": "bar"}',
    )

    (msg_event, error_event) = envelope_items

    assert "trace" in msg_event["contexts"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert "trace" in error_event["contexts"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
    )


@pytest.mark.xfail(
    reason="The limited log output we depend on is being clogged by a new warning"
)
def test_error_has_existing_trace_context_performance_enabled(run_lambda_function):
    trace_id = "471a43a4192642f0b136d5159a501701"
    parent_span_id = "6e8f22c393e68f19"
    parent_sampled = 1
    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)
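    # With the values above this yields
    # "471a43a4192642f0b136d5159a501701-6e8f22c393e68f19-1".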

    # Here we simulate AWS API Gateway's behavior of passing HTTP headers
    # as the `headers` dict in the event passed to the Lambda function.
    payload = {
        "headers": {
            "sentry-trace": sentry_trace_header,
        }
    }

    envelope_items, _ = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=1.0)

        def test_handler(event, context):
            sentry_sdk.capture_message("hi")
            raise Exception("Oh!")
        """
        ),
        payload=json.dumps(payload).encode(),
    )

    (msg_event, error_event, transaction_event) = envelope_items

    assert "trace" in msg_event["contexts"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert "trace" in error_event["contexts"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert "trace" in transaction_event["contexts"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
        == transaction_event["contexts"]["trace"]["trace_id"]
        == "471a43a4192642f0b136d5159a501701"
    )


def test_error_has_existing_trace_context_performance_disabled(run_lambda_function):
    trace_id = "471a43a4192642f0b136d5159a501701"
    parent_span_id = "6e8f22c393e68f19"
    parent_sampled = 1
    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)

    # Here we simulate AWS API Gateway's behavior of passing HTTP headers
    # as the `headers` dict in the event passed to the Lambda function.
    payload = {
        "headers": {
            "sentry-trace": sentry_trace_header,
        }
    }

    envelope_items, _ = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=None)  # this is the default, just added for clarity

        def test_handler(event, context):
            sentry_sdk.capture_message("hi")
            raise Exception("Oh!")
        """
        ),
        payload=json.dumps(payload).encode(),
    )

    (msg_event, error_event) = envelope_items

    assert "trace" in msg_event["contexts"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert "trace" in error_event["contexts"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
        == "471a43a4192642f0b136d5159a501701"
    )


def test_basic_with_eventbridge_source(run_lambda_function):
    envelope_items, response = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk()

        def test_handler(event, context):
            raise Exception("Oh!")
        """
        ),
        b'[{"topic":"lps-ranges","partition":1,"offset":0,"timestamp":1701268939207,"timestampType":"CREATE_TIME","key":"REDACTED","value":"REDACTED","headers":[],"eventSourceArn":"REDACTED","bootstrapServers":"REDACTED","eventSource":"aws:kafka","eventSourceKey":"lps-ranges-1"}]',
    )

    assert response["FunctionError"] == "Unhandled"

    (event,) = envelope_items
    assert event["level"] == "error"
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "Exception"
    assert exception["value"] == "Oh!"


def test_span_origin(run_lambda_function):
    envelope_items, response = run_lambda_function(
        LAMBDA_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=1.0)

        def test_handler(event, context):
            pass
        """
        ),
        b'{"foo": "bar"}',
    )

    (event,) = envelope_items

    assert event["contexts"]["trace"]["origin"] == "auto.function.aws_lambda"
sentry-python-2.18.0/tests/integrations/beam/__init__.py
import pytest

pytest.importorskip("apache_beam")
sentry-python-2.18.0/tests/integrations/beam/test_beam.py
import pytest
import inspect

import dill

from sentry_sdk.integrations.beam import (
    BeamIntegration,
    _wrap_task_call,
    _wrap_inspect_call,
)

from apache_beam.typehints.trivial_inference import instance_to_type
from apache_beam.typehints.decorators import getcallargs_forhints
from apache_beam.transforms.core import DoFn, ParDo, _DoFnParam, CallableWrapperDoFn
from apache_beam.runners.common import DoFnInvoker, DoFnContext
from apache_beam.utils.windowed_value import WindowedValue

try:
    from apache_beam.runners.common import OutputHandler
except ImportError:
    from apache_beam.runners.common import OutputProcessor as OutputHandler


def foo():
    return True


def bar(x, y):
    # print(x + y)
    return True


def baz(x, y=2):
    # print(x + y)
    return True


class A:
    def __init__(self, fn):
        self.r = "We are in A"
        self.fn = fn
        self._inspect_fn = _wrap_inspect_call(self, "fn")

    def process(self):
        return self.fn()


class B(A):
    def fa(self, x, element=False, another_element=False):
        if x or (element and not another_element):
            # print(self.r)
            return True
        1 / 0
        return False

    def __init__(self):
        self.r = "We are in B"
        super().__init__(self.fa)


class SimpleFunc(DoFn):
    def process(self, x):
        if x:
            1 / 0
        return [True]


class PlaceHolderFunc(DoFn):
    def process(self, x, timestamp=DoFn.TimestampParam, wx=DoFn.WindowParam):
        if isinstance(timestamp, _DoFnParam) or isinstance(wx, _DoFnParam):
            raise Exception("Bad instance")
        if x:
            1 / 0
        yield True


def fail(x):
    if x:
        1 / 0
    return [True]


test_parent = A(foo)
test_child = B()
test_simple = SimpleFunc()
test_place_holder = PlaceHolderFunc()
test_callable = CallableWrapperDoFn(fail)


# Cannot call simple functions or placeholder test.
@pytest.mark.parametrize(
    "obj,f,args,kwargs",
    [
        [test_parent, "fn", (), {}],
        [test_child, "fn", (False,), {"element": True}],
        [test_child, "fn", (True,), {}],
        [test_simple, "process", (False,), {}],
        [test_callable, "process", (False,), {}],
    ],
)
def test_monkey_patch_call(obj, f, args, kwargs):
    func = getattr(obj, f)

    assert func(*args, **kwargs)
    assert _wrap_task_call(func)(*args, **kwargs)


@pytest.mark.parametrize("f", [foo, bar, baz, test_parent.fn, test_child.fn])
def test_monkey_patch_pickle(f):
    f_temp = _wrap_task_call(f)
    assert dill.pickles(f_temp), "{} is not pickling correctly!".format(f)

    # Pickle everything
    s1 = dill.dumps(f_temp)
    s2 = dill.loads(s1)
    dill.dumps(s2)


@pytest.mark.parametrize(
    "f,args,kwargs",
    [
        [foo, (), {}],
        [bar, (1, 5), {}],
        [baz, (1,), {}],
        [test_parent.fn, (), {}],
        [test_child.fn, (False,), {"element": True}],
        [test_child.fn, (True,), {}],
    ],
)
def test_monkey_patch_signature(f, args, kwargs):
    arg_types = [instance_to_type(v) for v in args]
    kwargs_types = {k: instance_to_type(v) for (k, v) in kwargs.items()}
    f_temp = _wrap_task_call(f)
    try:
        getcallargs_forhints(f, *arg_types, **kwargs_types)
    except Exception:
        print("Failed on {} with parameters {}, {}".format(f, args, kwargs))
        raise
    try:
        getcallargs_forhints(f_temp, *arg_types, **kwargs_types)
    except Exception:
        print("Failed on {} with parameters {}, {}".format(f_temp, args, kwargs))
        raise
    try:
        expected_signature = inspect.signature(f)
        test_signature = inspect.signature(f_temp)
        assert (
            expected_signature == test_signature
        ), "Failed on {}, signature {} does not match {}".format(
            f, expected_signature, test_signature
        )
    except Exception:
        # expected to pass for py2.7
        pass


class _OutputHandler(OutputHandler):
    def process_outputs(
        self, windowed_input_element, results, watermark_estimator=None
    ):
        self.handle_process_outputs(
            windowed_input_element, results, watermark_estimator
        )

    def handle_process_outputs(
        self, windowed_input_element, results, watermark_estimator=None
    ):
        print(windowed_input_element)
        try:
            for result in results:
                assert result
        except StopIteration:
            print("In here")


@pytest.fixture
def init_beam(sentry_init):
    def inner(fn):
        sentry_init(default_integrations=False, integrations=[BeamIntegration()])
        # Little hack to avoid having to run the whole pipeline.
        pardo = ParDo(fn)
        signature = pardo._signature
        output_processor = _OutputHandler()
        return DoFnInvoker.create_invoker(
            signature,
            output_processor,
            DoFnContext("test"),
            input_args=[],
            input_kwargs={},
        )

    return inner


@pytest.mark.parametrize("fn", [test_simple, test_callable, test_place_holder])
def test_invoker_normal(init_beam, fn):
    invoker = init_beam(fn)
    print("Normal testing {} with {} invoker.".format(fn, invoker))
    windowed_value = WindowedValue(False, 0, [None])
    invoker.invoke_process(windowed_value)


@pytest.mark.parametrize("fn", [test_simple, test_callable, test_place_holder])
def test_invoker_exception(init_beam, capture_events, capture_exceptions, fn):
    invoker = init_beam(fn)
    events = capture_events()

    print("Exception testing {} with {} invoker.".format(fn, invoker))
    # Window value will always have one value for the process to run.
    windowed_value = WindowedValue(True, 0, [None])
    try:
        invoker.invoke_process(windowed_value)
    except Exception:
        pass

    (event,) = events
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    assert exception["mechanism"]["type"] == "beam"
sentry-python-2.18.0/tests/integrations/boto3/__init__.py
import pytest
import os

pytest.importorskip("boto3")
xml_fixture_path = os.path.dirname(os.path.abspath(__file__))


def read_fixture(name):
    with open(os.path.join(xml_fixture_path, name), "rb") as f:
        return f.read()
sentry-python-2.18.0/tests/integrations/boto3/aws_mock.py
from io import BytesIO
from botocore.awsrequest import AWSResponse


class Body(BytesIO):
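    """Minimal stand-in for a streaming response body: ``stream()`` yields
    successive ``read()`` results until the buffer is exhausted."""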
    def stream(self, **kwargs):
        contents = self.read()
        while contents:
            yield contents
            contents = self.read()


class MockResponse:
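    """Context manager that stubs out the HTTP layer of a boto3 client.

    While active, it is registered for the client's ``before-send`` event
    and answers every request with a canned ``AWSResponse``, so no real
    network call is made.
    """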
    def __init__(self, client, status_code, headers, body):
        self._client = client
        self._status_code = status_code
        self._headers = headers
        self._body = body

    def __enter__(self):
        self._client.meta.events.register("before-send", self)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self._client.meta.events.unregister("before-send", self)

    def __call__(self, request, **kwargs):
        return AWSResponse(
            request.url,
            self._status_code,
            self._headers,
            Body(self._body),
        )
sentry-python-2.18.0/tests/integrations/boto3/s3_list.xml
<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
    <Name>marshalls-furious-bucket</Name>
    <Prefix></Prefix>
    <Marker></Marker>
    <MaxKeys>1000</MaxKeys>
    <EncodingType>url</EncodingType>
    <IsTruncated>false</IsTruncated>
    <Contents>
        <Key>foo.txt</Key>
        <LastModified>2020-10-24T00:13:39.000Z</LastModified>
        <ETag>"a895ba674b4abd01b5d67cfd7074b827"</ETag>
        <Size>2064537</Size>
        <Owner>
            <ID>bef397f7e536914d1ff1bbdb105ed90bcfd06269456bf4a06c6e2e54564daf7</ID>
        </Owner>
        <StorageClass>STANDARD</StorageClass>
    </Contents>
    <Contents>
        <Key>bar.txt</Key>
        <LastModified>2020-10-02T15:15:20.000Z</LastModified>
        <ETag>"a895ba674b4abd01b5d67cfd7074b827"</ETag>
        <Size>2064537</Size>
        <Owner>
            <ID>bef397f7e536914d1ff1bbdb105ed90bcfd06269456bf4a06c6e2e54564daf7</ID>
        </Owner>
        <StorageClass>STANDARD</StorageClass>
    </Contents>
</ListBucketResult>
sentry-python-2.18.0/tests/integrations/boto3/test_s3.py
from unittest import mock

import boto3
import pytest

import sentry_sdk
from sentry_sdk.integrations.boto3 import Boto3Integration
from tests.conftest import ApproxDict
from tests.integrations.boto3 import read_fixture
from tests.integrations.boto3.aws_mock import MockResponse


session = boto3.Session(
    aws_access_key_id="-",
    aws_secret_access_key="-",
)


def test_basic(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()])
    events = capture_events()

    s3 = session.resource("s3")
    with sentry_sdk.start_transaction() as transaction, MockResponse(
        s3.meta.client, 200, {}, read_fixture("s3_list.xml")
    ):
        bucket = s3.Bucket("bucket")
        items = [obj for obj in bucket.objects.all()]
        assert len(items) == 2
        assert items[0].key == "foo.txt"
        assert items[1].key == "bar.txt"
        transaction.finish()

    (event,) = events
    assert event["type"] == "transaction"
    assert len(event["spans"]) == 1
    (span,) = event["spans"]
    assert span["op"] == "http.client"
    assert span["description"] == "aws.s3.ListObjects"


def test_streaming(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()])
    events = capture_events()

    s3 = session.resource("s3")
    with sentry_sdk.start_transaction() as transaction, MockResponse(
        s3.meta.client, 200, {}, b"hello"
    ):
        obj = s3.Bucket("bucket").Object("foo.pdf")
        body = obj.get()["Body"]
        assert body.read(1) == b"h"
        assert body.read(2) == b"el"
        assert body.read(3) == b"lo"
        assert body.read(1) == b""
        transaction.finish()

    (event,) = events
    assert event["type"] == "transaction"
    assert len(event["spans"]) == 2

    span1 = event["spans"][0]
    assert span1["op"] == "http.client"
    assert span1["description"] == "aws.s3.GetObject"
    assert span1["data"] == ApproxDict(
        {
            "http.method": "GET",
            "aws.request.url": "https://bucket.s3.amazonaws.com/foo.pdf",
            "http.fragment": "",
            "http.query": "",
        }
    )

    span2 = event["spans"][1]
    assert span2["op"] == "http.client.stream"
    assert span2["description"] == "aws.s3.GetObject"
    assert span2["parent_span_id"] == span1["span_id"]


def test_streaming_close(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()])
    events = capture_events()

    s3 = session.resource("s3")
    with sentry_sdk.start_transaction() as transaction, MockResponse(
        s3.meta.client, 200, {}, b"hello"
    ):
        obj = s3.Bucket("bucket").Object("foo.pdf")
        body = obj.get()["Body"]
        assert body.read(1) == b"h"
        body.close()  # close partially-read stream
        transaction.finish()

    (event,) = events
    assert event["type"] == "transaction"
    assert len(event["spans"]) == 2
    span1 = event["spans"][0]
    assert span1["op"] == "http.client"
    span2 = event["spans"][1]
    assert span2["op"] == "http.client.stream"


@pytest.mark.tests_internal_exceptions
def test_omit_url_data_if_parsing_fails(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()])
    events = capture_events()

    s3 = session.resource("s3")

    with mock.patch(
        "sentry_sdk.integrations.boto3.parse_url",
        side_effect=ValueError,
    ):
        with sentry_sdk.start_transaction() as transaction, MockResponse(
            s3.meta.client, 200, {}, read_fixture("s3_list.xml")
        ):
            bucket = s3.Bucket("bucket")
            items = [obj for obj in bucket.objects.all()]
            assert len(items) == 2
            assert items[0].key == "foo.txt"
            assert items[1].key == "bar.txt"
            transaction.finish()

    (event,) = events
    assert event["spans"][0]["data"] == ApproxDict(
        {
            "http.method": "GET",
            # no url data
        }
    )

    assert "aws.request.url" not in event["spans"][0]["data"]
    assert "http.fragment" not in event["spans"][0]["data"]
    assert "http.query" not in event["spans"][0]["data"]


def test_span_origin(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()])
    events = capture_events()

    s3 = session.resource("s3")
    with sentry_sdk.start_transaction(), MockResponse(
        s3.meta.client, 200, {}, read_fixture("s3_list.xml")
    ):
        bucket = s3.Bucket("bucket")
        _ = [obj for obj in bucket.objects.all()]

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"
    assert event["spans"][0]["origin"] == "auto.http.boto3"
sentry-python-2.18.0/tests/integrations/bottle/__init__.py
import pytest

pytest.importorskip("bottle")
sentry-python-2.18.0/tests/integrations/bottle/test_bottle.py
import json
import pytest
import logging

from io import BytesIO
from bottle import Bottle, debug as set_debug, abort, redirect, HTTPResponse
from sentry_sdk import capture_message
from sentry_sdk.integrations.bottle import BottleIntegration
from sentry_sdk.serializer import MAX_DATABAG_BREADTH

from sentry_sdk.integrations.logging import LoggingIntegration
from werkzeug.test import Client
from werkzeug.wrappers import Response

import sentry_sdk.integrations.bottle as bottle_sentry


@pytest.fixture(scope="function")
def app(sentry_init):
    app = Bottle()

    @app.route("/message")
    def hi():
        capture_message("hi")
        return "ok"

    @app.route("/message/")
    def hi_with_id(message_id):
        capture_message("hi")
        return "ok"

    @app.route("/message-named-route", name="hi")
    def named_hi():
        capture_message("hi")
        return "ok"

    yield app


@pytest.fixture
def get_client(app):
    def inner():
        return Client(app)

    return inner


def test_has_context(sentry_init, app, capture_events, get_client):
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])
    events = capture_events()

    client = get_client()
    response = client.get("/message")
    assert response[1] == "200 OK"

    (event,) = events
    assert event["message"] == "hi"
    assert "data" not in event["request"]
    assert event["request"]["url"] == "http://localhost/message"


@pytest.mark.parametrize(
    "url,transaction_style,expected_transaction,expected_source",
    [
        ("/message", "endpoint", "hi", "component"),
        ("/message", "url", "/message", "route"),
        ("/message/123456", "url", "/message/", "route"),
        ("/message-named-route", "endpoint", "hi", "component"),
    ],
)
def test_transaction_style(
    sentry_init,
    url,
    transaction_style,
    expected_transaction,
    expected_source,
    capture_events,
    get_client,
):
    sentry_init(
        integrations=[
            bottle_sentry.BottleIntegration(transaction_style=transaction_style)
        ]
    )
    events = capture_events()

    client = get_client()
    response = client.get(url)
    assert response[1] == "200 OK"

    (event,) = events
    # We use endswith() because in Python 2.7 it is "test_bottle.hi"
    # and in later Pythons "test_bottle.app.<locals>.hi"
    assert event["transaction"].endswith(expected_transaction)
    assert event["transaction_info"] == {"source": expected_source}


@pytest.mark.parametrize("debug", (True, False), ids=["debug", "nodebug"])
@pytest.mark.parametrize("catchall", (True, False), ids=["catchall", "nocatchall"])
def test_errors(
    sentry_init, capture_exceptions, capture_events, app, debug, catchall, get_client
):
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])

    app.catchall = catchall
    set_debug(mode=debug)

    exceptions = capture_exceptions()
    events = capture_events()

    @app.route("/")
    def index():
        1 / 0

    client = get_client()
    try:
        client.get("/")
    except ZeroDivisionError:
        pass

    (exc,) = exceptions
    assert isinstance(exc, ZeroDivisionError)

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "bottle"
    assert event["exception"]["values"][0]["mechanism"]["handled"] is False


def test_large_json_request(sentry_init, capture_events, app, get_client):
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])

    data = {"foo": {"bar": "a" * 2000}}

    @app.route("/", method="POST")
    def index():
        import bottle

        assert bottle.request.json == data
        assert bottle.request.body.read() == json.dumps(data).encode("ascii")
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = get_client()
    response = client.get("/")

    response = client.post("/", content_type="application/json", data=json.dumps(data))
    assert response[1] == "200 OK"

    (event,) = events
    assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
    }
    assert len(event["request"]["data"]["foo"]["bar"]) == 1024


@pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
def test_empty_json_request(sentry_init, capture_events, app, data, get_client):
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])

    @app.route("/", method="POST")
    def index():
        import bottle

        assert bottle.request.json == data
        assert bottle.request.body.read() == json.dumps(data).encode("ascii")
        # assert not bottle.request.forms
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = get_client()
    response = client.post("/", content_type="application/json", data=json.dumps(data))
    assert response[1] == "200 OK"

    (event,) = events
    assert event["request"]["data"] == data


def test_medium_formdata_request(sentry_init, capture_events, app, get_client):
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])

    data = {"foo": "a" * 2000}

    @app.route("/", method="POST")
    def index():
        import bottle

        assert bottle.request.forms["foo"] == data["foo"]
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = get_client()
    response = client.post("/", data=data)
    assert response[1] == "200 OK"

    (event,) = events
    assert event["_meta"]["request"]["data"]["foo"] == {
        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
    }
    assert len(event["request"]["data"]["foo"]) == 1024


@pytest.mark.parametrize("input_char", ["a", b"a"])
def test_too_large_raw_request(
    sentry_init, input_char, capture_events, app, get_client
):
    sentry_init(
        integrations=[bottle_sentry.BottleIntegration()], max_request_body_size="small"
    )

    data = input_char * 2000

    @app.route("/", method="POST")
    def index():
        import bottle

        if isinstance(data, bytes):
            assert bottle.request.body.read() == data
        else:
            assert bottle.request.body.read() == data.encode("ascii")
        assert not bottle.request.json
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = get_client()
    response = client.post("/", data=data)
    assert response[1] == "200 OK"

    (event,) = events
    assert event["_meta"]["request"]["data"] == {"": {"rem": [["!config", "x"]]}}
    assert not event["request"]["data"]


def test_files_and_form(sentry_init, capture_events, app, get_client):
    sentry_init(
        integrations=[bottle_sentry.BottleIntegration()], max_request_body_size="always"
    )

    data = {"foo": "a" * 2000, "file": (BytesIO(b"hello"), "hello.txt")}

    @app.route("/", method="POST")
    def index():
        import bottle

        assert list(bottle.request.forms) == ["foo"]
        assert list(bottle.request.files) == ["file"]
        assert not bottle.request.json
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = get_client()
    response = client.post("/", data=data)
    assert response[1] == "200 OK"

    (event,) = events
    assert event["_meta"]["request"]["data"]["foo"] == {
        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
    }
    assert len(event["request"]["data"]["foo"]) == 1024

    assert event["_meta"]["request"]["data"]["file"] == {
        "": {
            "rem": [["!raw", "x"]],
        }
    }
    assert not event["request"]["data"]["file"]


def test_json_not_truncated_if_max_request_body_size_is_always(
    sentry_init, capture_events, app, get_client
):
    sentry_init(
        integrations=[bottle_sentry.BottleIntegration()], max_request_body_size="always"
    )

    data = {
        "key{}".format(i): "value{}".format(i) for i in range(MAX_DATABAG_BREADTH + 10)
    }

    @app.route("/", method="POST")
    def index():
        import bottle

        assert bottle.request.json == data
        assert bottle.request.body.read() == json.dumps(data).encode("ascii")
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = get_client()

    response = client.post("/", content_type="application/json", data=json.dumps(data))
    assert response[1] == "200 OK"

    (event,) = events
    assert event["request"]["data"] == data


@pytest.mark.parametrize(
    "integrations",
    [
        [bottle_sentry.BottleIntegration()],
        [bottle_sentry.BottleIntegration(), LoggingIntegration(event_level="ERROR")],
    ],
)
def test_errors_not_reported_twice(
    sentry_init, integrations, capture_events, app, get_client
):
    sentry_init(integrations=integrations)

    app.catchall = False

    logger = logging.getLogger("bottle.app")

    @app.route("/")
    def index():
        try:
            1 / 0
        except Exception as e:
            logger.exception(e)
            raise e

    events = capture_events()

    client = get_client()
    with pytest.raises(ZeroDivisionError):
        client.get("/")

    assert len(events) == 1


def test_mount(app, capture_exceptions, capture_events, sentry_init, get_client):
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])

    app.catchall = False

    def crashing_app(environ, start_response):
        1 / 0

    app.mount("/wsgi/", crashing_app)

    client = Client(app)

    exceptions = capture_exceptions()
    events = capture_events()

    with pytest.raises(ZeroDivisionError) as exc:
        client.get("/wsgi/")

    (error,) = exceptions

    assert error is exc.value

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "bottle"
    assert event["exception"]["values"][0]["mechanism"]["handled"] is False


def test_error_in_errorhandler(sentry_init, capture_events, app, get_client):
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])

    set_debug(False)
    app.catchall = True

    @app.route("/")
    def index():
        raise ValueError()

    @app.error(500)
    def error_handler(err):
        1 / 0

    events = capture_events()

    client = get_client()

    with pytest.raises(ZeroDivisionError):
        client.get("/")

    event1, event2 = events

    (exception,) = event1["exception"]["values"]
    assert exception["type"] == "ValueError"

    exception = event2["exception"]["values"][0]
    assert exception["type"] == "ZeroDivisionError"


def test_bad_request_not_captured(sentry_init, capture_events, app, get_client):
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])
    events = capture_events()

    @app.route("/")
    def index():
        abort(400, "bad request in")

    client = get_client()

    client.get("/")

    assert not events


def test_no_exception_on_redirect(sentry_init, capture_events, app, get_client):
    sentry_init(integrations=[bottle_sentry.BottleIntegration()])
    events = capture_events()

    @app.route("/")
    def index():
        redirect("/here")

    @app.route("/here")
    def here():
        return "here"

    client = get_client()

    client.get("/")

    assert not events


def test_span_origin(
    sentry_init,
    get_client,
    capture_events,
):
    sentry_init(
        integrations=[bottle_sentry.BottleIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client = get_client()
    client.get("/message")

    (_, event) = events

    assert event["contexts"]["trace"]["origin"] == "auto.http.bottle"


@pytest.mark.parametrize("raise_error", [True, False])
@pytest.mark.parametrize(
    ("integration_kwargs", "status_code", "should_capture"),
    (
        ({}, None, False),
        ({}, 400, False),
        ({}, 451, False),  # Highest 4xx status code
        ({}, 500, True),
        ({}, 511, True),  # Highest 5xx status code
        ({"failed_request_status_codes": set()}, 500, False),
        ({"failed_request_status_codes": set()}, 511, False),
        ({"failed_request_status_codes": {404, *range(500, 600)}}, 404, True),
        ({"failed_request_status_codes": {404, *range(500, 600)}}, 500, True),
        ({"failed_request_status_codes": {404, *range(500, 600)}}, 400, False),
    ),
)
def test_failed_request_status_codes(
    sentry_init,
    capture_events,
    integration_kwargs,
    status_code,
    should_capture,
    raise_error,
):
    sentry_init(integrations=[BottleIntegration(**integration_kwargs)])
    events = capture_events()

    app = Bottle()

    @app.route("/")
    def handle():
        if status_code is not None:
            response = HTTPResponse(status=status_code)
            if raise_error:
                raise response
            else:
                return response
        return "OK"

    client = Client(app, Response)
    response = client.get("/")

    expected_status = 200 if status_code is None else status_code
    assert response.status_code == expected_status

    if should_capture:
        (event,) = events
        assert event["exception"]["values"][0]["type"] == "HTTPResponse"
    else:
        assert not events


def test_failed_request_status_codes_non_http_exception(sentry_init, capture_events):
    """
    If an exception that is not an instance of HTTPResponse is raised, it should be
    captured, even if failed_request_status_codes is empty.
    """
    sentry_init(integrations=[BottleIntegration(failed_request_status_codes=set())])
    events = capture_events()

    app = Bottle()

    @app.route("/")
    def handle():
        1 / 0

    client = Client(app, Response)

    try:
        client.get("/")
    except ZeroDivisionError:
        pass

    (event,) = events
    assert event["exception"]["values"][0]["type"] == "ZeroDivisionError"
sentry-python-2.18.0/tests/integrations/celery/__init__.py
import pytest

pytest.importorskip("celery")
sentry-python-2.18.0/tests/integrations/celery/integration_tests/__init__.py
import os
import signal
import tempfile
import threading
import time

from celery.beat import Scheduler

from sentry_sdk.utils import logger


class ImmediateScheduler(Scheduler):
    """
    A custom scheduler that starts tasks immediately after starting Celery beat.
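
    Hooked up via Celery's ``beat_scheduler`` setting, e.g.:

        app.conf.beat_scheduler = (
            "tests.integrations.celery.integration_tests:ImmediateScheduler"
        )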
    """

    def setup_schedule(self):
        super().setup_schedule()
        for _, entry in self.schedule.items():
            self.apply_entry(entry)

    def tick(self):
        # Override tick to prevent the normal schedule cycle
        return 1


def kill_beat(beat_pid_file, delay_seconds=1):
    """
    Terminates Celery Beat after the given `delay_seconds`.
    """
    logger.info("Starting Celery Beat killer...")
    time.sleep(delay_seconds)
    pid = int(open(beat_pid_file, "r").read())
    logger.info("Terminating Celery Beat...")
    os.kill(pid, signal.SIGTERM)


def run_beat(celery_app, runtime_seconds=1, loglevel="warning", quiet=True):
    """
    Run Celery Beat that immediately starts tasks.
    The Celery Beat instance is automatically terminated after `runtime_seconds`.
    """
    logger.info("Starting Celery Beat...")
    pid_file = os.path.join(tempfile.mkdtemp(), f"celery-beat-{os.getpid()}.pid")

    t = threading.Thread(
        target=kill_beat,
        args=(pid_file,),
        kwargs={"delay_seconds": runtime_seconds},
    )
    t.start()

    beat_instance = celery_app.Beat(
        loglevel=loglevel,
        quiet=quiet,
        pidfile=pid_file,
    )
    beat_instance.run()
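

# Typical usage, as in test_celery_beat_cron_monitoring.py: start a worker,
# then run Beat, which kicks off the scheduled tasks immediately:
#
#     with start_worker(app, perform_ping_check=False):
#         run_beat(app)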
sentry-python-2.18.0/tests/integrations/celery/integration_tests/test_celery_beat_cron_monitoring.py
import os
import pytest

from celery.contrib.testing.worker import start_worker

from sentry_sdk.utils import logger

from tests.integrations.celery.integration_tests import run_beat


REDIS_SERVER = "redis://127.0.0.1:6379"
REDIS_DB = 15


@pytest.fixture()
def celery_config():
    return {
        "worker_concurrency": 1,
        "broker_url": f"{REDIS_SERVER}/{REDIS_DB}",
        "result_backend": f"{REDIS_SERVER}/{REDIS_DB}",
        "beat_scheduler": "tests.integrations.celery.integration_tests:ImmediateScheduler",
        "task_always_eager": False,
        "task_create_missing_queues": True,
        "task_default_queue": f"queue_{os.getpid()}",
    }


@pytest.fixture
def celery_init(sentry_init, celery_config):
    """
    Create a Sentry instrumented Celery app.
    """
    from celery import Celery

    from sentry_sdk.integrations.celery import CeleryIntegration

    def inner(propagate_traces=True, monitor_beat_tasks=False, **kwargs):
        sentry_init(
            integrations=[
                CeleryIntegration(
                    propagate_traces=propagate_traces,
                    monitor_beat_tasks=monitor_beat_tasks,
                )
            ],
            **kwargs,
        )
        app = Celery("tasks")
        app.conf.update(celery_config)

        return app

    return inner


@pytest.mark.forked
def test_explanation(celery_init, capture_envelopes):
    """
    This is a dummy test for explaining how to test using Celery Beat
    """

    # First initialize a Celery app.
    # You can pass the options for CeleryIntegration
    # and the options for `sentry_sdk.init` as keyword arguments.
    # See the celery_init fixture for details.
    app = celery_init(
        monitor_beat_tasks=True,
    )

    # Capture envelopes.
    envelopes = capture_envelopes()

    # Define the task you want to run
    @app.task
    def test_task():
        logger.info("Running test_task")

    # Add the task to the beat schedule
    app.add_periodic_task(60.0, test_task.s(), name="success_from_beat")

    # Start a Celery worker
    with start_worker(app, perform_ping_check=False):
        # And start a Celery Beat instance.
        # This Celery Beat will run the task above immediately
        # after startup, for the first time.
        # By default, Celery Beat is terminated after 1 second;
        # see the `run_beat` function for how to change this.
        run_beat(app)

    # After the Celery Beat is terminated, you can check the envelopes
    assert len(envelopes) >= 0


@pytest.mark.forked
def test_beat_task_crons_success(celery_init, capture_envelopes):
    app = celery_init(
        monitor_beat_tasks=True,
    )
    envelopes = capture_envelopes()

    @app.task
    def test_task():
        logger.info("Running test_task")

    app.add_periodic_task(60.0, test_task.s(), name="success_from_beat")

    with start_worker(app, perform_ping_check=False):
        run_beat(app)

    assert len(envelopes) == 2
    (envelop_in_progress, envelope_ok) = envelopes

    assert envelop_in_progress.items[0].headers["type"] == "check_in"
    check_in = envelop_in_progress.items[0].payload.json
    assert check_in["type"] == "check_in"
    assert check_in["monitor_slug"] == "success_from_beat"
    assert check_in["status"] == "in_progress"

    assert envelope_ok.items[0].headers["type"] == "check_in"
    check_in = envelope_ok.items[0].payload.json
    assert check_in["type"] == "check_in"
    assert check_in["monitor_slug"] == "success_from_beat"
    assert check_in["status"] == "ok"


@pytest.mark.forked
def test_beat_task_crons_error(celery_init, capture_envelopes):
    app = celery_init(
        monitor_beat_tasks=True,
    )
    envelopes = capture_envelopes()

    @app.task
    def test_task():
        logger.info("Running test_task")
        1 / 0

    app.add_periodic_task(60.0, test_task.s(), name="failure_from_beat")

    with start_worker(app, perform_ping_check=False):
        run_beat(app)

    envelop_in_progress = envelopes[0]
    envelope_error = envelopes[-1]

    check_in = envelop_in_progress.items[0].payload.json
    assert check_in["type"] == "check_in"
    assert check_in["monitor_slug"] == "failure_from_beat"
    assert check_in["status"] == "in_progress"

    check_in = envelope_error.items[0].payload.json
    assert check_in["type"] == "check_in"
    assert check_in["monitor_slug"] == "failure_from_beat"
    assert check_in["status"] == "error"
sentry-python-2.18.0/tests/integrations/celery/test_celery.py
import threading
import kombu
from unittest import mock

import pytest
from celery import Celery, VERSION
from celery.bin import worker

import sentry_sdk
from sentry_sdk import start_transaction, get_current_span
from sentry_sdk.integrations.celery import (
    CeleryIntegration,
    _wrap_task_run,
)
from sentry_sdk.integrations.celery.beat import _get_headers
from tests.conftest import ApproxDict


@pytest.fixture
def connect_signal(request):
    def inner(signal, f):
        signal.connect(f)
        request.addfinalizer(lambda: signal.disconnect(f))

    return inner


@pytest.fixture
def init_celery(sentry_init, request):
    def inner(
        propagate_traces=True,
        backend="always_eager",
        monitor_beat_tasks=False,
        **kwargs,
    ):
        sentry_init(
            integrations=[
                CeleryIntegration(
                    propagate_traces=propagate_traces,
                    monitor_beat_tasks=monitor_beat_tasks,
                )
            ],
            **kwargs,
        )
        celery = Celery(__name__)

        if backend == "always_eager":
            if VERSION < (4,):
                celery.conf.CELERY_ALWAYS_EAGER = True
            else:
                celery.conf.task_always_eager = True
        elif backend == "redis":
            # broken on celery 3
            if VERSION < (4,):
                pytest.skip("Redis backend broken for some reason")

            # this backend requires capture_events_forksafe
            celery.conf.worker_max_tasks_per_child = 1
            celery.conf.worker_concurrency = 1
            celery.conf.broker_url = "redis://127.0.0.1:6379"
            celery.conf.result_backend = "redis://127.0.0.1:6379"
            celery.conf.task_always_eager = False

            # Once we drop celery 3 we can use the celery_worker fixture
            if VERSION < (5,):
                worker_fn = worker.worker(app=celery).run
            else:
                from celery.bin.base import CLIContext

                worker_fn = lambda: worker.worker(
                    obj=CLIContext(app=celery, no_color=True, workdir=".", quiet=False),
                    args=[],
                )

            worker_thread = threading.Thread(target=worker_fn)
            worker_thread.daemon = True
            worker_thread.start()
        else:
            raise ValueError(backend)

        return celery

    return inner


@pytest.fixture
def celery(init_celery):
    return init_celery()


@pytest.fixture(
    params=[
        lambda task, x, y: (
            task.delay(x, y),
            {"args": [x, y], "kwargs": {}},
        ),
        lambda task, x, y: (
            task.apply_async((x, y)),
            {"args": [x, y], "kwargs": {}},
        ),
        lambda task, x, y: (
            task.apply_async(args=(x, y)),
            {"args": [x, y], "kwargs": {}},
        ),
        lambda task, x, y: (
            task.apply_async(kwargs=dict(x=x, y=y)),
            {"args": [], "kwargs": {"x": x, "y": y}},
        ),
    ]
)
def celery_invocation(request):
    """
    Invokes a task in the multiple ways Celery allows, to test our apply_async monkeypatch.

    Currently limited to a task signature of the form foo(x, y).
    """
    return request.param


def test_simple_with_performance(capture_events, init_celery, celery_invocation):
    celery = init_celery(traces_sample_rate=1.0)
    events = capture_events()

    @celery.task(name="dummy_task")
    def dummy_task(x, y):
        foo = 42  # noqa
        return x / y

    with start_transaction(op="unit test transaction") as transaction:
        celery_invocation(dummy_task, 1, 2)
        _, expected_context = celery_invocation(dummy_task, 1, 0)

    (_, error_event, _, _) = events

    assert error_event["contexts"]["trace"]["trace_id"] == transaction.trace_id
    assert error_event["contexts"]["trace"]["span_id"] != transaction.span_id
    assert error_event["transaction"] == "dummy_task"
    assert "celery_task_id" in error_event["tags"]
    assert error_event["extra"]["celery-job"] == dict(
        task_name="dummy_task", **expected_context
    )

    (exception,) = error_event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    assert exception["mechanism"]["type"] == "celery"
    assert exception["stacktrace"]["frames"][0]["vars"]["foo"] == "42"


def test_simple_without_performance(capture_events, init_celery, celery_invocation):
    celery = init_celery(traces_sample_rate=None)
    events = capture_events()

    @celery.task(name="dummy_task")
    def dummy_task(x, y):
        foo = 42  # noqa
        return x / y

    scope = sentry_sdk.get_isolation_scope()

    celery_invocation(dummy_task, 1, 2)
    _, expected_context = celery_invocation(dummy_task, 1, 0)

    (error_event,) = events

    assert (
        error_event["contexts"]["trace"]["trace_id"]
        == scope._propagation_context.trace_id
    )
    assert (
        error_event["contexts"]["trace"]["span_id"]
        != scope._propagation_context.span_id
    )
    assert error_event["transaction"] == "dummy_task"
    assert "celery_task_id" in error_event["tags"]
    assert error_event["extra"]["celery-job"] == dict(
        task_name="dummy_task", **expected_context
    )

    (exception,) = error_event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    assert exception["mechanism"]["type"] == "celery"
    assert exception["stacktrace"]["frames"][0]["vars"]["foo"] == "42"


@pytest.mark.parametrize("task_fails", [True, False], ids=["error", "success"])
def test_transaction_events(capture_events, init_celery, celery_invocation, task_fails):
    celery = init_celery(traces_sample_rate=1.0)

    @celery.task(name="dummy_task")
    def dummy_task(x, y):
        return x / y

    # XXX: For some reason the first call does not get instrumented properly.
    celery_invocation(dummy_task, 1, 1)

    events = capture_events()

    with start_transaction(name="submission") as transaction:
        celery_invocation(dummy_task, 1, 0 if task_fails else 1)

    if task_fails:
        error_event = events.pop(0)
        assert error_event["contexts"]["trace"]["trace_id"] == transaction.trace_id
        assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"

    execution_event, submission_event = events
    assert execution_event["transaction"] == "dummy_task"
    assert execution_event["transaction_info"] == {"source": "task"}

    assert submission_event["transaction"] == "submission"
    assert submission_event["transaction_info"] == {"source": "custom"}

    assert execution_event["type"] == submission_event["type"] == "transaction"
    assert execution_event["contexts"]["trace"]["trace_id"] == transaction.trace_id
    assert submission_event["contexts"]["trace"]["trace_id"] == transaction.trace_id

    if task_fails:
        assert execution_event["contexts"]["trace"]["status"] == "internal_error"
    else:
        assert execution_event["contexts"]["trace"]["status"] == "ok"

    assert len(execution_event["spans"]) == 1
    assert (
        execution_event["spans"][0].items()
        >= {
            "trace_id": str(transaction.trace_id),
            "same_process_as_parent": True,
            "op": "queue.process",
            "description": "dummy_task",
            "data": ApproxDict(),
        }.items()
    )
    assert submission_event["spans"] == [
        {
            "data": ApproxDict(),
            "description": "dummy_task",
            "op": "queue.submit.celery",
            "origin": "auto.queue.celery",
            "parent_span_id": submission_event["contexts"]["trace"]["span_id"],
            "same_process_as_parent": True,
            "span_id": submission_event["spans"][0]["span_id"],
            "start_timestamp": submission_event["spans"][0]["start_timestamp"],
            "timestamp": submission_event["spans"][0]["timestamp"],
            "trace_id": str(transaction.trace_id),
        }
    ]


def test_no_stackoverflows(celery):
    """We used to have a bug in the Celery integration where its monkeypatching
    was repeated for every task invocation, leading to stackoverflows.

    See https://github.com/getsentry/sentry-python/issues/265
    """

    results = []

    @celery.task(name="dummy_task")
    def dummy_task():
        sentry_sdk.get_isolation_scope().set_tag("foo", "bar")
        results.append(42)

    for _ in range(10000):
        dummy_task.delay()

    assert results == [42] * 10000
    assert not sentry_sdk.get_isolation_scope()._tags
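

# The bug class exercised above is avoided by making the monkeypatching
# idempotent. A minimal sketch of the guard-flag pattern (illustrative only;
# the names here are hypothetical, not the SDK's actual internals):
def _patch_run_once_sketch(task_cls):
    # A second call is a no-op, so the wrapping never nests.
    if getattr(task_cls, "_sketch_already_patched", False):
        return
    original_run = task_cls.run

    def patched_run(self, *args, **kwargs):
        return original_run(self, *args, **kwargs)

    task_cls.run = patched_run
    task_cls._sketch_already_patched = True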


def test_simple_no_propagation(capture_events, init_celery):
    celery = init_celery(propagate_traces=False)
    events = capture_events()

    @celery.task(name="dummy_task")
    def dummy_task():
        1 / 0

    with start_transaction() as transaction:
        dummy_task.delay()

    (event,) = events
    assert event["contexts"]["trace"]["trace_id"] != transaction.trace_id
    assert event["transaction"] == "dummy_task"
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"


def test_ignore_expected(capture_events, celery):
    events = capture_events()

    @celery.task(name="dummy_task", throws=(ZeroDivisionError,))
    def dummy_task(x, y):
        return x / y

    dummy_task.delay(1, 2)
    dummy_task.delay(1, 0)
    assert not events


@pytest.mark.xfail(
    (4, 2, 0) <= VERSION < (4, 4, 3),
    strict=True,
    reason="https://github.com/celery/celery/issues/4661",
)
def test_retry(celery, capture_events):
    events = capture_events()
    failures = [True, True, False]
    runs = []

    @celery.task(name="dummy_task", bind=True)
    def dummy_task(self):
        runs.append(1)
        try:
            if failures.pop(0):
                1 / 0
        except Exception as exc:
            self.retry(max_retries=2, exc=exc)

    dummy_task.delay()

    assert len(runs) == 3
    assert not events

    failures = [True, True, True]
    runs = []

    dummy_task.delay()

    assert len(runs) == 3
    (event,) = events
    exceptions = event["exception"]["values"]

    for e in exceptions:
        assert e["type"] == "ZeroDivisionError"


@pytest.mark.skip(
    reason="This test is hanging when running test with `tox --parallel auto`. TODO: Figure out why and fix it!"
)
@pytest.mark.forked
def test_redis_backend_trace_propagation(init_celery, capture_events_forksafe):
    celery = init_celery(traces_sample_rate=1.0, backend="redis")

    events = capture_events_forksafe()

    runs = []

    @celery.task(name="dummy_task", bind=True)
    def dummy_task(self):
        runs.append(1)
        1 / 0

    with start_transaction(name="submit_celery"):
        # Curious: Cannot use delay() here or py2.7-celery-4.2 crashes
        res = dummy_task.apply_async()

    with pytest.raises(Exception):  # noqa: B017
        # Celery 4.1 raises a gibberish exception
        res.wait()

    # if this is nonempty, the worker never really forked
    assert not runs

    submit_transaction = events.read_event()
    assert submit_transaction["type"] == "transaction"
    assert submit_transaction["transaction"] == "submit_celery"

    # 4 spans expected because the redis integration was auto enabled
    assert len(submit_transaction["spans"]) == 4
    span = submit_transaction["spans"][0]
    assert span["op"] == "queue.submit.celery"
    assert span["description"] == "dummy_task"

    event = events.read_event()
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"

    transaction = events.read_event()
    assert (
        transaction["contexts"]["trace"]["trace_id"]
        == event["contexts"]["trace"]["trace_id"]
        == submit_transaction["contexts"]["trace"]["trace_id"]
    )

    events.read_flush()

    # if this is nonempty, the worker never really forked
    assert not runs


@pytest.mark.forked
@pytest.mark.parametrize("newrelic_order", ["sentry_first", "sentry_last"])
def test_newrelic_interference(init_celery, newrelic_order, celery_invocation):
    def instrument_newrelic():
        try:
            # older newrelic versions
            from newrelic.hooks.application_celery import (
                instrument_celery_execute_trace,
            )
            import celery.app.trace as celery_trace_module

            assert hasattr(celery_trace_module, "build_tracer")
            instrument_celery_execute_trace(celery_trace_module)

        except ImportError:
            # newer newrelic versions
            from newrelic.hooks.application_celery import instrument_celery_app_base
            import celery.app as celery_app_module

            assert hasattr(celery_app_module, "Celery")
            assert hasattr(celery_app_module.Celery, "send_task")
            instrument_celery_app_base(celery_app_module)

    if newrelic_order == "sentry_first":
        celery = init_celery()
        instrument_newrelic()
    elif newrelic_order == "sentry_last":
        instrument_newrelic()
        celery = init_celery()
    else:
        raise ValueError(newrelic_order)

    @celery.task(name="dummy_task", bind=True)
    def dummy_task(self, x, y):
        return x / y

    assert dummy_task.apply(kwargs={"x": 1, "y": 1}).wait() == 1
    assert celery_invocation(dummy_task, 1, 1)[0].wait() == 1


def test_traces_sampler_gets_task_info_in_sampling_context(
    init_celery, celery_invocation, DictionaryContaining  # noqa:N803
):
    traces_sampler = mock.Mock()
    celery = init_celery(traces_sampler=traces_sampler)

    @celery.task(name="dog_walk")
    def walk_dogs(x, y):
        dogs, route = x
        num_loops = y
        return dogs, route, num_loops

    _, args_kwargs = celery_invocation(
        walk_dogs, [["Maisey", "Charlie", "Bodhi", "Cory"], "Dog park round trip"], 1
    )

    traces_sampler.assert_any_call(
        # depending on the iteration of celery_invocation, the data might be
        # passed as args or as kwargs, so make this generic
        DictionaryContaining({"celery_job": dict(task="dog_walk", **args_kwargs)})
    )


def test_abstract_task(capture_events, celery, celery_invocation):
    events = capture_events()

    class AbstractTask(celery.Task):
        abstract = True

        def __call__(self, *args, **kwargs):
            try:
                return self.run(*args, **kwargs)
            except ZeroDivisionError:
                return None

    @celery.task(name="dummy_task", base=AbstractTask)
    def dummy_task(x, y):
        return x / y

    with start_transaction():
        celery_invocation(dummy_task, 1, 0)

    assert not events


def test_task_headers(celery):
    """
    Test that the headers set by the Celery Beat auto-instrumentation are
    passed through to the Celery signal handlers.
    """
    sentry_crons_setup = {
        "sentry-monitor-slug": "some-slug",
        "sentry-monitor-config": {"some": "config"},
        "sentry-monitor-check-in-id": "123abc",
    }

    @celery.task(name="dummy_task", bind=True)
    def dummy_task(self, x, y):
        return _get_headers(self)

    # This is how the Celery Beat auto-instrumentation starts a task
    # in the monkey patched version of `apply_async`
    # in `sentry_sdk/integrations/celery.py::_wrap_apply_async()`
    result = dummy_task.apply_async(args=(1, 0), headers=sentry_crons_setup)

    expected_headers = sentry_crons_setup.copy()
    # Newly added headers
    expected_headers["sentry-trace"] = mock.ANY
    expected_headers["baggage"] = mock.ANY
    expected_headers["sentry-task-enqueued-time"] = mock.ANY

    assert result.get() == expected_headers
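

# For illustration, this is roughly the call shape described in the comment
# above. A hedged sketch: the helper name is hypothetical, and the real logic
# (which also adds sentry-trace/baggage headers) lives in
# sentry_sdk/integrations/celery.py::_wrap_apply_async().
def _enqueue_with_crons_headers_sketch(task, args, monitor_slug, check_in_id):
    # Attach the crons bookkeeping headers before enqueueing the task.
    headers = {
        "sentry-monitor-slug": monitor_slug,
        "sentry-monitor-check-in-id": check_in_id,
    }
    return task.apply_async(args=args, headers=headers)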


def test_baggage_propagation(init_celery):
    celery = init_celery(traces_sample_rate=1.0, release="abcdef")

    @celery.task(name="dummy_task", bind=True)
    def dummy_task(self, x, y):
        return _get_headers(self)

    with start_transaction() as transaction:
        result = dummy_task.apply_async(
            args=(1, 0),
            headers={"baggage": "custom=value"},
        ).get()

        assert sorted(result["baggage"].split(",")) == sorted(
            [
                "sentry-release=abcdef",
                "sentry-trace_id={}".format(transaction.trace_id),
                "sentry-environment=production",
                "sentry-sample_rate=1.0",
                "sentry-sampled=true",
                "custom=value",
            ]
        )


def test_sentry_propagate_traces_override(init_celery):
    """
    Test that the `sentry-propagate-traces` header passed to `apply_async`
    overrides the `propagate_traces` parameter in the integration constructor.
    """
    celery = init_celery(
        propagate_traces=True, traces_sample_rate=1.0, release="abcdef"
    )

    @celery.task(name="dummy_task", bind=True)
    def dummy_task(self, message):
        trace_id = get_current_span().trace_id
        return trace_id

    with start_transaction() as transaction:
        transaction_trace_id = transaction.trace_id

        # should propagate trace
        task_transaction_id = dummy_task.apply_async(
            args=("some message",),
        ).get()
        assert transaction_trace_id == task_transaction_id

        # should NOT propagate trace (overrides `propagate_traces` parameter in integration constructor)
        task_transaction_id = dummy_task.apply_async(
            args=("another message",),
            headers={"sentry-propagate-traces": False},
        ).get()
        assert transaction_trace_id != task_transaction_id


def test_apply_async_manually_span(sentry_init):
    sentry_init(
        integrations=[CeleryIntegration()],
    )

    def dummy_function(*args, **kwargs):
        headers = kwargs.get("headers")
        assert "sentry-trace" in headers
        assert "baggage" in headers

    wrapped = _wrap_task_run(dummy_function)
    wrapped(mock.MagicMock(), (), headers={})


def test_apply_async_no_args(init_celery):
    celery = init_celery()

    @celery.task
    def example_task():
        return "success"

    try:
        result = example_task.apply_async(None, {})
    except TypeError:
        pytest.fail("Calling `apply_async` without arguments raised a TypeError")

    assert result.get() == "success"


@pytest.mark.parametrize("routing_key", ("celery", "custom"))
@mock.patch("celery.app.task.Task.request")
def test_messaging_destination_name_default_exchange(
    mock_request, routing_key, init_celery, capture_events
):
    celery_app = init_celery(enable_tracing=True)
    events = capture_events()
    mock_request.delivery_info = {"routing_key": routing_key, "exchange": ""}

    @celery_app.task()
    def task(): ...

    task.apply_async()

    (event,) = events
    (span,) = event["spans"]
    assert span["data"]["messaging.destination.name"] == routing_key


@mock.patch("celery.app.task.Task.request")
def test_messaging_destination_name_nondefault_exchange(
    mock_request, init_celery, capture_events
):
    """
    Currently, we only capture the routing key as the messaging.destination.name when
    we are using the default exchange (""). This is because the default exchange ensures
    that the routing key is the queue name. Other exchanges may not guarantee this
    behavior.
    """
    celery_app = init_celery(enable_tracing=True)
    events = capture_events()
    mock_request.delivery_info = {"routing_key": "celery", "exchange": "custom"}

    @celery_app.task()
    def task(): ...

    task.apply_async()

    (event,) = events
    (span,) = event["spans"]
    assert "messaging.destination.name" not in span["data"]


def test_messaging_id(init_celery, capture_events):
    celery = init_celery(enable_tracing=True)
    events = capture_events()

    @celery.task
    def example_task(): ...

    example_task.apply_async()

    (event,) = events
    (span,) = event["spans"]
    assert "messaging.message.id" in span["data"]


def test_retry_count_zero(init_celery, capture_events):
    celery = init_celery(enable_tracing=True)
    events = capture_events()

    @celery.task()
    def task(): ...

    task.apply_async()

    (event,) = events
    (span,) = event["spans"]
    assert span["data"]["messaging.message.retry.count"] == 0


@mock.patch("celery.app.task.Task.request")
def test_retry_count_nonzero(mock_request, init_celery, capture_events):
    mock_request.retries = 3

    celery = init_celery(enable_tracing=True)
    events = capture_events()

    @celery.task()
    def task(): ...

    task.apply_async()

    (event,) = events
    (span,) = event["spans"]
    assert span["data"]["messaging.message.retry.count"] == 3


@pytest.mark.parametrize("system", ("redis", "amqp"))
def test_messaging_system(system, init_celery, capture_events):
    celery = init_celery(enable_tracing=True)
    events = capture_events()

    # Does not need to be a real URL, since we use always eager
    celery.conf.broker_url = f"{system}://example.com"  # noqa: E231

    @celery.task()
    def task(): ...

    task.apply_async()

    (event,) = events
    (span,) = event["spans"]
    assert span["data"]["messaging.system"] == system


@pytest.mark.parametrize("system", ("amqp", "redis"))
def test_producer_span_data(system, monkeypatch, sentry_init, capture_events):
    old_publish = kombu.messaging.Producer._publish

    def publish(*args, **kwargs):
        pass

    monkeypatch.setattr(kombu.messaging.Producer, "_publish", publish)

    sentry_init(integrations=[CeleryIntegration()], enable_tracing=True)
    celery = Celery(__name__, broker=f"{system}://example.com")  # noqa: E231
    events = capture_events()

    @celery.task()
    def task(): ...

    with start_transaction():
        task.apply_async()

    (event,) = events
    span = next(span for span in event["spans"] if span["op"] == "queue.publish")

    assert span["data"]["messaging.system"] == system

    assert span["data"]["messaging.destination.name"] == "celery"
    assert "messaging.message.id" in span["data"]
    assert span["data"]["messaging.message.retry.count"] == 0

    monkeypatch.setattr(kombu.messaging.Producer, "_publish", old_publish)


def test_receive_latency(init_celery, capture_events):
    celery = init_celery(traces_sample_rate=1.0)
    events = capture_events()

    @celery.task()
    def task(): ...

    task.apply_async()

    (event,) = events
    (span,) = event["spans"]
    assert "messaging.message.receive.latency" in span["data"]
    assert span["data"]["messaging.message.receive.latency"] > 0


def test_span_origin_consumer(init_celery, capture_events):
    celery = init_celery(enable_tracing=True)
    celery.conf.broker_url = "redis://example.com"  # noqa: E231

    events = capture_events()

    @celery.task()
    def task(): ...

    task.apply_async()

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "auto.queue.celery"
    assert event["spans"][0]["origin"] == "auto.queue.celery"


def test_span_origin_producer(monkeypatch, sentry_init, capture_events):
    old_publish = kombu.messaging.Producer._publish

    def publish(*args, **kwargs):
        pass

    monkeypatch.setattr(kombu.messaging.Producer, "_publish", publish)

    sentry_init(integrations=[CeleryIntegration()], enable_tracing=True)
    celery = Celery(__name__, broker="redis://example.com")  # noqa: E231

    events = capture_events()

    @celery.task()
    def task(): ...

    with start_transaction(name="custom_transaction"):
        task.apply_async()

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"

    for span in event["spans"]:
        assert span["origin"] == "auto.queue.celery"

    monkeypatch.setattr(kombu.messaging.Producer, "_publish", old_publish)


@pytest.mark.forked
@mock.patch("celery.Celery.send_task")
def test_send_task_wrapped(
    patched_send_task,
    sentry_init,
    capture_events,
    reset_integrations,
):
    sentry_init(integrations=[CeleryIntegration()], enable_tracing=True)
    celery = Celery(__name__, broker="redis://example.com")  # noqa: E231

    events = capture_events()

    with sentry_sdk.start_transaction(name="custom_transaction"):
        celery.send_task("very_creative_task_name", args=(1, 2), kwargs={"foo": "bar"})

    (call,) = patched_send_task.call_args_list  # We should have exactly one call
    (args, kwargs) = call

    assert args == (celery, "very_creative_task_name")
    assert kwargs["args"] == (1, 2)
    assert kwargs["kwargs"] == {"foo": "bar"}
    assert set(kwargs["headers"].keys()) == {
        "sentry-task-enqueued-time",
        "sentry-trace",
        "baggage",
        "headers",
    }
    assert set(kwargs["headers"]["headers"].keys()) == {
        "sentry-trace",
        "baggage",
        "sentry-task-enqueued-time",
    }
    assert (
        kwargs["headers"]["sentry-trace"]
        == kwargs["headers"]["headers"]["sentry-trace"]
    )

    (event,) = events  # We should have exactly one event (the transaction)
    assert event["type"] == "transaction"
    assert event["transaction"] == "custom_transaction"

    (span,) = event["spans"]  # We should have exactly one span
    assert span["description"] == "very_creative_task_name"
    assert span["op"] == "queue.submit.celery"
    assert span["trace_id"] == kwargs["headers"]["sentry-trace"].split("-")[0]


@pytest.mark.skip(reason="placeholder so that forked test does not come last")
def test_placeholder():
    """Forked tests must not come last in the module.
    See https://github.com/pytest-dev/pytest-forked/issues/67#issuecomment-1964718720.
    """
    pass
sentry-python-2.18.0/tests/integrations/celery/test_celery_beat_crons.py000066400000000000000000000372561471214654000267140ustar00rootroot00000000000000import datetime
from unittest import mock
from unittest.mock import MagicMock

import pytest
from celery.schedules import crontab, schedule

from sentry_sdk.crons import MonitorStatus
from sentry_sdk.integrations.celery.beat import (
    _get_headers,
    _get_monitor_config,
    _patch_beat_apply_entry,
    _patch_redbeat_maybe_due,
    crons_task_failure,
    crons_task_retry,
    crons_task_success,
)
from sentry_sdk.integrations.celery.utils import _get_humanized_interval


def test_get_headers():
    fake_task = MagicMock()
    fake_task.request = {
        "bla": "blub",
        "foo": "bar",
    }

    assert _get_headers(fake_task) == {}

    fake_task.request.update(
        {
            "headers": {
                "bla": "blub",
            },
        }
    )

    assert _get_headers(fake_task) == {"bla": "blub"}

    fake_task.request.update(
        {
            "headers": {
                "headers": {
                    "tri": "blub",
                    "bar": "baz",
                },
                "bla": "blub",
            },
        }
    )

    assert _get_headers(fake_task) == {"bla": "blub", "tri": "blub", "bar": "baz"}
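

# A minimal sketch of the flattening behavior asserted above (simplified
# relative to the SDK's actual _get_headers): take the task request's
# "headers" dict and merge one nested "headers" level into it.
def _get_headers_sketch(task):
    headers = dict(task.request.get("headers") or {})
    nested = headers.pop("headers", {})
    headers.update(nested)
    return headers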


@pytest.mark.parametrize(
    "seconds, expected_tuple",
    [
        (0, (0, "second")),
        (1, (1, "second")),
        (0.00001, (0, "second")),
        (59, (59, "second")),
        (60, (1, "minute")),
        (100, (1, "minute")),
        (1000, (16, "minute")),
        (10000, (2, "hour")),
        (100000, (1, "day")),
        (100000000, (1157, "day")),
    ],
)
def test_get_humanized_interval(seconds, expected_tuple):
    assert _get_humanized_interval(seconds) == expected_tuple
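

# For reference, the conversion above can be expressed as the following
# sketch (illustrative only; the real implementation is
# sentry_sdk.integrations.celery.utils._get_humanized_interval): walk the
# units from largest to smallest and take the first one that fits.
def _get_humanized_interval_sketch(seconds):
    for divisor, unit in ((60 * 60 * 24, "day"), (60 * 60, "hour"), (60, "minute")):
        if seconds >= divisor:
            return int(seconds // divisor), unit
    return int(seconds), "second"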


def test_crons_task_success():
    fake_task = MagicMock()
    fake_task.request = {
        "headers": {
            "sentry-monitor-slug": "test123",
            "sentry-monitor-check-in-id": "1234567890",
            "sentry-monitor-start-timestamp-s": 200.1,
            "sentry-monitor-config": {
                "schedule": {
                    "type": "interval",
                    "value": 3,
                    "unit": "day",
                },
                "timezone": "Europe/Vienna",
            },
            "sentry-monitor-some-future-key": "some-future-value",
        },
    }

    with mock.patch(
        "sentry_sdk.integrations.celery.beat.capture_checkin"
    ) as mock_capture_checkin:
        with mock.patch(
            "sentry_sdk.integrations.celery.beat._now_seconds_since_epoch",
            return_value=500.5,
        ):
            crons_task_success(fake_task)

            mock_capture_checkin.assert_called_once_with(
                monitor_slug="test123",
                monitor_config={
                    "schedule": {
                        "type": "interval",
                        "value": 3,
                        "unit": "day",
                    },
                    "timezone": "Europe/Vienna",
                },
                duration=300.4,
                check_in_id="1234567890",
                status=MonitorStatus.OK,
            )


def test_crons_task_failure():
    fake_task = MagicMock()
    fake_task.request = {
        "headers": {
            "sentry-monitor-slug": "test123",
            "sentry-monitor-check-in-id": "1234567890",
            "sentry-monitor-start-timestamp-s": 200.1,
            "sentry-monitor-config": {
                "schedule": {
                    "type": "interval",
                    "value": 3,
                    "unit": "day",
                },
                "timezone": "Europe/Vienna",
            },
            "sentry-monitor-some-future-key": "some-future-value",
        },
    }

    with mock.patch(
        "sentry_sdk.integrations.celery.beat.capture_checkin"
    ) as mock_capture_checkin:
        with mock.patch(
            "sentry_sdk.integrations.celery.beat._now_seconds_since_epoch",
            return_value=500.5,
        ):
            crons_task_failure(fake_task)

            mock_capture_checkin.assert_called_once_with(
                monitor_slug="test123",
                monitor_config={
                    "schedule": {
                        "type": "interval",
                        "value": 3,
                        "unit": "day",
                    },
                    "timezone": "Europe/Vienna",
                },
                duration=300.4,
                check_in_id="1234567890",
                status=MonitorStatus.ERROR,
            )


def test_crons_task_retry():
    fake_task = MagicMock()
    fake_task.request = {
        "headers": {
            "sentry-monitor-slug": "test123",
            "sentry-monitor-check-in-id": "1234567890",
            "sentry-monitor-start-timestamp-s": 200.1,
            "sentry-monitor-config": {
                "schedule": {
                    "type": "interval",
                    "value": 3,
                    "unit": "day",
                },
                "timezone": "Europe/Vienna",
            },
            "sentry-monitor-some-future-key": "some-future-value",
        },
    }

    with mock.patch(
        "sentry_sdk.integrations.celery.beat.capture_checkin"
    ) as mock_capture_checkin:
        with mock.patch(
            "sentry_sdk.integrations.celery.beat._now_seconds_since_epoch",
            return_value=500.5,
        ):
            crons_task_retry(fake_task)

            mock_capture_checkin.assert_called_once_with(
                monitor_slug="test123",
                monitor_config={
                    "schedule": {
                        "type": "interval",
                        "value": 3,
                        "unit": "day",
                    },
                    "timezone": "Europe/Vienna",
                },
                duration=300.4,
                check_in_id="1234567890",
                status=MonitorStatus.ERROR,
            )


def test_get_monitor_config_crontab():
    app = MagicMock()
    app.timezone = "Europe/Vienna"

    # schedule with the default timezone
    celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")

    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
    assert monitor_config == {
        "schedule": {
            "type": "crontab",
            "value": "*/10 12 3 * *",
        },
        "timezone": "UTC",  # the default because `crontab` does not know about the app
    }
    assert "unit" not in monitor_config["schedule"]

    # schedule with the timezone from the app
    celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10", app=app)

    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
    assert monitor_config == {
        "schedule": {
            "type": "crontab",
            "value": "*/10 12 3 * *",
        },
        "timezone": "Europe/Vienna",  # the timezone from the app
    }

    # schedule without a timezone, the celery integration will read the config from the app
    celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")
    celery_schedule.tz = None

    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
    assert monitor_config == {
        "schedule": {
            "type": "crontab",
            "value": "*/10 12 3 * *",
        },
        "timezone": "Europe/Vienna",  # the timezone from the app
    }

    # schedule without a timezone, and an app without timezone, the celery integration will fall back to UTC
    app = MagicMock()
    app.timezone = None

    celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")
    celery_schedule.tz = None
    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
    assert monitor_config == {
        "schedule": {
            "type": "crontab",
            "value": "*/10 12 3 * *",
        },
        "timezone": "UTC",  # default timezone from celery integration
    }
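

# The timezone fallback chain exercised above, as a sketch (an assumption,
# simplified from the SDK's helper): the schedule's timezone wins, then the
# app's timezone, then UTC.
def _resolve_timezone_sketch(celery_schedule, app):
    if getattr(celery_schedule, "tz", None) is not None:
        return str(celery_schedule.tz)
    if getattr(app, "timezone", None):
        return str(app.timezone)
    return "UTC"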


def test_get_monitor_config_seconds():
    app = MagicMock()
    app.timezone = "Europe/Vienna"

    celery_schedule = schedule(run_every=3)  # seconds

    with mock.patch("sentry_sdk.integrations.logger.warning") as mock_logger_warning:
        monitor_config = _get_monitor_config(celery_schedule, app, "foo")
        mock_logger_warning.assert_called_with(
            "Intervals shorter than one minute are not supported by Sentry Crons. Monitor '%s' has an interval of %s seconds. Use the `exclude_beat_tasks` option in the celery integration to exclude it.",
            "foo",
            3,
        )
        assert monitor_config == {}
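

# Sketch of the guard exercised above (illustrative, not the SDK's literal
# code): intervals that humanize to seconds are below Sentry Crons' minimum
# and yield an empty monitor config instead of a schedule.
def _interval_monitor_config_sketch(run_every_seconds):
    value, unit = _get_humanized_interval(run_every_seconds)
    if unit == "second":
        return {}  # unsupported: shorter than one minute
    return {"schedule": {"type": "interval", "value": value, "unit": unit}}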


def test_get_monitor_config_minutes():
    app = MagicMock()
    app.timezone = "Europe/Vienna"

    # schedule with the default timezone
    celery_schedule = schedule(run_every=60)  # seconds

    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
    assert monitor_config == {
        "schedule": {
            "type": "interval",
            "value": 1,
            "unit": "minute",
        },
        "timezone": "UTC",
    }

    # schedule with the timezone from the app
    celery_schedule = schedule(run_every=60, app=app)  # seconds

    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
    assert monitor_config == {
        "schedule": {
            "type": "interval",
            "value": 1,
            "unit": "minute",
        },
        "timezone": "Europe/Vienna",  # the timezone from the app
    }

    # schedule without a timezone, the celery integration will read the config from the app
    celery_schedule = schedule(run_every=60)  # seconds
    celery_schedule.tz = None

    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
    assert monitor_config == {
        "schedule": {
            "type": "interval",
            "value": 1,
            "unit": "minute",
        },
        "timezone": "Europe/Vienna",  # the timezone from the app
    }

    # schedule without a timezone, and an app without timezone, the celery integration will fall back to UTC
    app = MagicMock()
    app.timezone = None

    celery_schedule = schedule(run_every=60)  # seconds
    celery_schedule.tz = None

    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
    assert monitor_config == {
        "schedule": {
            "type": "interval",
            "value": 1,
            "unit": "minute",
        },
        "timezone": "UTC",  # default timezone from celery integration
    }


def test_get_monitor_config_unknown():
    app = MagicMock()
    app.timezone = "Europe/Vienna"

    unknown_celery_schedule = MagicMock()
    monitor_config = _get_monitor_config(unknown_celery_schedule, app, "foo")
    assert monitor_config == {}


def test_get_monitor_config_default_timezone():
    app = MagicMock()
    app.timezone = None

    celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")

    monitor_config = _get_monitor_config(celery_schedule, app, "dummy_monitor_name")

    assert monitor_config["timezone"] == "UTC"


def test_get_monitor_config_timezone_in_app_conf():
    app = MagicMock()
    app.timezone = "Asia/Karachi"

    celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")
    celery_schedule.tz = None

    monitor_config = _get_monitor_config(celery_schedule, app, "dummy_monitor_name")

    assert monitor_config["timezone"] == "Asia/Karachi"


def test_get_monitor_config_timezone_in_celery_schedule():
    app = MagicMock()
    app.timezone = "Asia/Karachi"

    panama_tz = datetime.timezone(datetime.timedelta(hours=-5), name="America/Panama")

    celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")
    celery_schedule.tz = panama_tz

    monitor_config = _get_monitor_config(celery_schedule, app, "dummy_monitor_name")

    assert monitor_config["timezone"] == str(panama_tz)


@pytest.mark.parametrize(
    "task_name,exclude_beat_tasks,task_in_excluded_beat_tasks",
    [
        ["some_task_name", ["xxx", "some_task.*"], True],
        ["some_task_name", ["xxx", "some_other_task.*"], False],
    ],
)
def test_exclude_beat_tasks_option(
    task_name, exclude_beat_tasks, task_in_excluded_beat_tasks
):
    """
    Test excluding Celery Beat tasks from automatic instrumentation.
    """
    fake_apply_entry = MagicMock()

    fake_scheduler = MagicMock()
    fake_scheduler.apply_entry = fake_apply_entry

    fake_integration = MagicMock()
    fake_integration.exclude_beat_tasks = exclude_beat_tasks

    fake_client = MagicMock()
    fake_client.get_integration.return_value = fake_integration

    fake_schedule_entry = MagicMock()
    fake_schedule_entry.name = task_name

    fake_get_monitor_config = MagicMock()

    with mock.patch(
        "sentry_sdk.integrations.celery.beat.Scheduler", fake_scheduler
    ) as Scheduler:  # noqa: N806
        with mock.patch(
            "sentry_sdk.integrations.celery.sentry_sdk.get_client",
            return_value=fake_client,
        ):
            with mock.patch(
                "sentry_sdk.integrations.celery.beat._get_monitor_config",
                fake_get_monitor_config,
            ) as _get_monitor_config:
                # Mimic CeleryIntegration patching of Scheduler.apply_entry()
                _patch_beat_apply_entry()
                # Mimic Celery Beat calling a task from the Beat schedule
                Scheduler.apply_entry(fake_scheduler, fake_schedule_entry)

                if task_in_excluded_beat_tasks:
                    # Only the original Scheduler.apply_entry() is called, _get_monitor_config is NOT called.
                    assert fake_apply_entry.call_count == 1
                    _get_monitor_config.assert_not_called()

                else:
                    # The original Scheduler.apply_entry() is called, AND _get_monitor_config is called.
                    assert fake_apply_entry.call_count == 1
                    assert _get_monitor_config.call_count == 1
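

# Sketch of the exclusion check the test above relies on (a simplified
# assumption, not the SDK's exact helper): any pattern in exclude_beat_tasks
# matching the task name disables crons instrumentation for that task.
import re


def _is_excluded_sketch(task_name, exclude_beat_tasks):
    return any(re.match(pattern, task_name) for pattern in exclude_beat_tasks or [])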


@pytest.mark.parametrize(
    "task_name,exclude_beat_tasks,task_in_excluded_beat_tasks",
    [
        ["some_task_name", ["xxx", "some_task.*"], True],
        ["some_task_name", ["xxx", "some_other_task.*"], False],
    ],
)
def test_exclude_redbeat_tasks_option(
    task_name, exclude_beat_tasks, task_in_excluded_beat_tasks
):
    """
    Test excluding Celery RedBeat tasks from automatic instrumentation.
    """
    fake_maybe_due = MagicMock()

    fake_redbeat_scheduler = MagicMock()
    fake_redbeat_scheduler.maybe_due = fake_maybe_due

    fake_integration = MagicMock()
    fake_integration.exclude_beat_tasks = exclude_beat_tasks

    fake_client = MagicMock()
    fake_client.get_integration.return_value = fake_integration

    fake_schedule_entry = MagicMock()
    fake_schedule_entry.name = task_name

    fake_get_monitor_config = MagicMock()

    with mock.patch(
        "sentry_sdk.integrations.celery.beat.RedBeatScheduler", fake_redbeat_scheduler
    ) as RedBeatScheduler:  # noqa: N806
        with mock.patch(
            "sentry_sdk.integrations.celery.sentry_sdk.get_client",
            return_value=fake_client,
        ):
            with mock.patch(
                "sentry_sdk.integrations.celery.beat._get_monitor_config",
                fake_get_monitor_config,
            ) as _get_monitor_config:
                # Mimic CeleryIntegration patching of RedBeatScheduler.maybe_due()
                _patch_redbeat_maybe_due()
                # Mimic Celery RedBeat calling a task from the RedBeat schedule
                RedBeatScheduler.maybe_due(fake_redbeat_scheduler, fake_schedule_entry)

                if task_in_excluded_beat_tasks:
                    # Only the original RedBeatScheduler.maybe_due() is called, _get_monitor_config is NOT called.
                    assert fake_maybe_due.call_count == 1
                    _get_monitor_config.assert_not_called()

                else:
                    # The original RedBeatScheduler.maybe_due() is called, AND _get_monitor_config is called.
                    assert fake_maybe_due.call_count == 1
                    assert _get_monitor_config.call_count == 1
sentry-python-2.18.0/tests/integrations/celery/test_update_celery_task_headers.py000066400000000000000000000214471471214654000305670ustar00rootroot00000000000000from copy import copy
import itertools
import pytest

from unittest import mock

from sentry_sdk.integrations.celery import _update_celery_task_headers
import sentry_sdk
from sentry_sdk.tracing_utils import Baggage


BAGGAGE_VALUE = (
    "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
    "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
    "sentry-sample_rate=0.1337,"
    "custom=value"
)

SENTRY_TRACE_VALUE = "771a43a4192642f0b136d5159a501700-1234567890abcdef-1"


@pytest.mark.parametrize("monitor_beat_tasks", [True, False, None, "", "bla", 1, 0])
def test_monitor_beat_tasks(monitor_beat_tasks):
    headers = {}
    span = None

    outgoing_headers = _update_celery_task_headers(headers, span, monitor_beat_tasks)

    assert headers == {}  # left unchanged

    if monitor_beat_tasks:
        assert outgoing_headers["sentry-monitor-start-timestamp-s"] == mock.ANY
        assert (
            outgoing_headers["headers"]["sentry-monitor-start-timestamp-s"] == mock.ANY
        )
    else:
        assert "sentry-monitor-start-timestamp-s" not in outgoing_headers
        assert "sentry-monitor-start-timestamp-s" not in outgoing_headers["headers"]


@pytest.mark.parametrize("monitor_beat_tasks", [True, False, None, "", "bla", 1, 0])
def test_monitor_beat_tasks_with_headers(monitor_beat_tasks):
    headers = {
        "blub": "foo",
        "sentry-something": "bar",
        "sentry-task-enqueued-time": mock.ANY,
    }
    span = None

    outgoing_headers = _update_celery_task_headers(headers, span, monitor_beat_tasks)

    assert headers == {
        "blub": "foo",
        "sentry-something": "bar",
        "sentry-task-enqueued-time": mock.ANY,
    }  # left unchanged

    if monitor_beat_tasks:
        assert outgoing_headers["blub"] == "foo"
        assert outgoing_headers["sentry-something"] == "bar"
        assert outgoing_headers["sentry-monitor-start-timestamp-s"] == mock.ANY
        assert outgoing_headers["headers"]["sentry-something"] == "bar"
        assert (
            outgoing_headers["headers"]["sentry-monitor-start-timestamp-s"] == mock.ANY
        )
    else:
        assert outgoing_headers["blub"] == "foo"
        assert outgoing_headers["sentry-something"] == "bar"
        assert "sentry-monitor-start-timestamp-s" not in outgoing_headers
        assert "sentry-monitor-start-timestamp-s" not in outgoing_headers["headers"]


def test_span_with_transaction(sentry_init):
    sentry_init(enable_tracing=True)
    headers = {}
    monitor_beat_tasks = False

    with sentry_sdk.start_transaction(name="test_transaction") as transaction:
        with sentry_sdk.start_span(op="test_span") as span:
            outgoing_headers = _update_celery_task_headers(
                headers, span, monitor_beat_tasks
            )

            assert outgoing_headers["sentry-trace"] == span.to_traceparent()
            assert outgoing_headers["headers"]["sentry-trace"] == span.to_traceparent()
            assert outgoing_headers["baggage"] == transaction.get_baggage().serialize()
            assert (
                outgoing_headers["headers"]["baggage"]
                == transaction.get_baggage().serialize()
            )


def test_span_with_transaction_custom_headers(sentry_init):
    sentry_init(enable_tracing=True)
    headers = {
        "baggage": BAGGAGE_VALUE,
        "sentry-trace": SENTRY_TRACE_VALUE,
    }

    with sentry_sdk.start_transaction(name="test_transaction") as transaction:
        with sentry_sdk.start_span(op="test_span") as span:
            outgoing_headers = _update_celery_task_headers(headers, span, False)

            assert outgoing_headers["sentry-trace"] == span.to_traceparent()
            assert outgoing_headers["headers"]["sentry-trace"] == span.to_traceparent()

            incoming_baggage = Baggage.from_incoming_header(headers["baggage"])
            combined_baggage = copy(transaction.get_baggage())
            combined_baggage.sentry_items.update(incoming_baggage.sentry_items)
            combined_baggage.third_party_items = ",".join(
                [
                    x
                    for x in [
                        combined_baggage.third_party_items,
                        incoming_baggage.third_party_items,
                    ]
                    if x is not None and x != ""
                ]
            )
            assert outgoing_headers["baggage"] == combined_baggage.serialize(
                include_third_party=True
            )
            assert outgoing_headers["headers"]["baggage"] == combined_baggage.serialize(
                include_third_party=True
            )


@pytest.mark.parametrize("monitor_beat_tasks", [True, False])
def test_celery_trace_propagation_default(sentry_init, monitor_beat_tasks):
    """
    The Celery integration does not check traces_sample_rate.
    By default traces_sample_rate is None, which would mean "do not propagate
    traces", but the Celery integration ignores that and uses its own
    mechanism to propagate traces:
    https://docs.sentry.io/platforms/python/integrations/celery/#distributed-traces
    """
    sentry_init()

    headers = {}
    span = None

    scope = sentry_sdk.get_isolation_scope()

    outgoing_headers = _update_celery_task_headers(headers, span, monitor_beat_tasks)

    assert outgoing_headers["sentry-trace"] == scope.get_traceparent()
    assert outgoing_headers["headers"]["sentry-trace"] == scope.get_traceparent()
    assert outgoing_headers["baggage"] == scope.get_baggage().serialize()
    assert outgoing_headers["headers"]["baggage"] == scope.get_baggage().serialize()

    if monitor_beat_tasks:
        assert "sentry-monitor-start-timestamp-s" in outgoing_headers
        assert "sentry-monitor-start-timestamp-s" in outgoing_headers["headers"]
    else:
        assert "sentry-monitor-start-timestamp-s" not in outgoing_headers
        assert "sentry-monitor-start-timestamp-s" not in outgoing_headers["headers"]


@pytest.mark.parametrize(
    "traces_sample_rate,monitor_beat_tasks",
    list(itertools.product([None, 0, 0.0, 0.5, 1.0, 1, 2], [True, False])),
)
def test_celery_trace_propagation_traces_sample_rate(
    sentry_init, traces_sample_rate, monitor_beat_tasks
):
    """
    The Celery integration does not check traces_sample_rate.
    By default traces_sample_rate is None, which would mean "do not propagate
    traces", but the Celery integration ignores that and uses its own
    mechanism to propagate traces:
    https://docs.sentry.io/platforms/python/integrations/celery/#distributed-traces
    """
    sentry_init(traces_sample_rate=traces_sample_rate)

    headers = {}
    span = None

    scope = sentry_sdk.get_isolation_scope()

    outgoing_headers = _update_celery_task_headers(headers, span, monitor_beat_tasks)

    assert outgoing_headers["sentry-trace"] == scope.get_traceparent()
    assert outgoing_headers["headers"]["sentry-trace"] == scope.get_traceparent()
    assert outgoing_headers["baggage"] == scope.get_baggage().serialize()
    assert outgoing_headers["headers"]["baggage"] == scope.get_baggage().serialize()

    if monitor_beat_tasks:
        assert "sentry-monitor-start-timestamp-s" in outgoing_headers
        assert "sentry-monitor-start-timestamp-s" in outgoing_headers["headers"]
    else:
        assert "sentry-monitor-start-timestamp-s" not in outgoing_headers
        assert "sentry-monitor-start-timestamp-s" not in outgoing_headers["headers"]


@pytest.mark.parametrize(
    "enable_tracing,monitor_beat_tasks",
    list(itertools.product([None, True, False], [True, False])),
)
def test_celery_trace_propagation_enable_tracing(
    sentry_init, enable_tracing, monitor_beat_tasks
):
    """
    The Celery integration does not check traces_sample_rate.
    By default traces_sample_rate is None, which would mean "do not propagate
    traces", but the Celery integration ignores that and uses its own
    mechanism to propagate traces:
    https://docs.sentry.io/platforms/python/integrations/celery/#distributed-traces
    """
    sentry_init(enable_tracing=enable_tracing)

    headers = {}
    span = None

    scope = sentry_sdk.get_isolation_scope()

    outgoing_headers = _update_celery_task_headers(headers, span, monitor_beat_tasks)

    assert outgoing_headers["sentry-trace"] == scope.get_traceparent()
    assert outgoing_headers["headers"]["sentry-trace"] == scope.get_traceparent()
    assert outgoing_headers["baggage"] == scope.get_baggage().serialize()
    assert outgoing_headers["headers"]["baggage"] == scope.get_baggage().serialize()

    if monitor_beat_tasks:
        assert "sentry-monitor-start-timestamp-s" in outgoing_headers
        assert "sentry-monitor-start-timestamp-s" in outgoing_headers["headers"]
    else:
        assert "sentry-monitor-start-timestamp-s" not in outgoing_headers
        assert "sentry-monitor-start-timestamp-s" not in outgoing_headers["headers"]
sentry-python-2.18.0/tests/integrations/chalice/000077500000000000000000000000001471214654000217115ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/chalice/__init__.py000066400000000000000000000000561471214654000240230ustar00rootroot00000000000000import pytest

pytest.importorskip("chalice")
sentry-python-2.18.0/tests/integrations/chalice/test_chalice.py000066400000000000000000000104451471214654000247160ustar00rootroot00000000000000import pytest
import time
from chalice import Chalice, BadRequestError
from chalice.local import LambdaContext, LocalGateway

from sentry_sdk import capture_message
from sentry_sdk.integrations.chalice import CHALICE_VERSION, ChaliceIntegration
from sentry_sdk.utils import parse_version

from pytest_chalice.handlers import RequestHandler


def _generate_lambda_context(self):
    # Monkeypatch LocalGateway._generate_lambda_context
    # so that we can mock the timeout.
    # type: () -> LambdaContext
    if self._config.lambda_timeout is None:
        timeout = 10 * 1000
    else:
        timeout = self._config.lambda_timeout * 1000
    return LambdaContext(
        function_name=self._config.function_name,
        memory_size=self._config.lambda_memory_size,
        max_runtime_ms=timeout,
    )


@pytest.fixture
def app(sentry_init):
    sentry_init(integrations=[ChaliceIntegration()])
    app = Chalice(app_name="sentry_chalice")

    @app.route("/boom")
    def boom():
        raise Exception("boom goes the dynamite!")

    @app.route("/context")
    def has_request():
        raise Exception("boom goes the dynamite!")

    @app.route("/badrequest")
    def badrequest():
        raise BadRequestError("bad-request")

    @app.route("/message")
    def hi():
        capture_message("hi")
        return {"status": "ok"}

    @app.route("/message/{message_id}")
    def hi_with_id(message_id):
        capture_message("hi again")
        return {"status": "ok"}

    LocalGateway._generate_lambda_context = _generate_lambda_context

    return app


@pytest.fixture
def lambda_context_args():
    return ["lambda_name", 256]


def test_exception_boom(app, client: RequestHandler) -> None:
    response = client.get("/boom")
    assert response.status_code == 500
    assert response.json == {
        "Code": "InternalServerError",
        "Message": "An internal server error occurred.",
    }


def test_has_request(app, capture_events, client: RequestHandler):
    events = capture_events()

    response = client.get("/context")
    assert response.status_code == 500

    (event,) = events
    assert event["level"] == "error"
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "Exception"


def test_scheduled_event(app, lambda_context_args):
    @app.schedule("rate(1 minutes)")
    def every_hour(event):
        raise Exception("schedule event!")

    context = LambdaContext(
        *lambda_context_args, max_runtime_ms=10000, time_source=time
    )

    lambda_event = {
        "version": "0",
        "account": "120987654312",
        "region": "us-west-1",
        "detail": {},
        "detail-type": "Scheduled Event",
        "source": "aws.events",
        "time": "1970-01-01T00:00:00Z",
        "id": "event-id",
        "resources": ["arn:aws:events:us-west-1:120987654312:rule/my-schedule"],
    }
    with pytest.raises(Exception) as exc_info:
        every_hour(lambda_event, context=context)
    assert str(exc_info.value) == "schedule event!"


@pytest.mark.skipif(
    parse_version(CHALICE_VERSION) >= (1, 28),
    reason="different behavior based on chalice version",
)
def test_bad_request_old(client: RequestHandler) -> None:
    response = client.get("/badrequest")

    assert response.status_code == 400
    assert response.json == {
        "Code": "BadRequestError",
        "Message": "BadRequestError: bad-request",
    }


@pytest.mark.skipif(
    parse_version(CHALICE_VERSION) < (1, 28),
    reason="different behavior based on chalice version",
)
def test_bad_request(client: RequestHandler) -> None:
    response = client.get("/badrequest")

    assert response.status_code == 400
    assert response.json == {
        "Code": "BadRequestError",
        "Message": "bad-request",
    }


@pytest.mark.parametrize(
    "url,expected_transaction,expected_source",
    [
        ("/message", "api_handler", "component"),
        ("/message/123456", "api_handler", "component"),
    ],
)
def test_transaction(
    app,
    client: RequestHandler,
    capture_events,
    url,
    expected_transaction,
    expected_source,
):
    events = capture_events()

    response = client.get(url)
    assert response.status_code == 200

    (event,) = events
    assert event["transaction"] == expected_transaction
    assert event["transaction_info"] == {"source": expected_source}
sentry-python-2.18.0/tests/integrations/clickhouse_driver/000077500000000000000000000000001471214654000240255ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/clickhouse_driver/__init__.py000066400000000000000000000000701471214654000261330ustar00rootroot00000000000000import pytest

pytest.importorskip("clickhouse_driver")
sentry-python-2.18.0/tests/integrations/clickhouse_driver/test_clickhouse_driver.py000066400000000000000000000734061471214654000311540ustar00rootroot00000000000000"""
These tests need a local ClickHouse instance running; the easiest way to start one is:
```sh
docker run -d -p 18123:8123 -p9000:9000 --name clickhouse-test --ulimit nofile=262144:262144 --rm clickhouse/clickhouse-server
```
"""

import clickhouse_driver
from clickhouse_driver import Client, connect

from sentry_sdk import start_transaction, capture_message
from sentry_sdk.integrations.clickhouse_driver import ClickhouseDriverIntegration
from tests.conftest import ApproxDict

EXPECT_PARAMS_IN_SELECT = True
if clickhouse_driver.VERSION < (0, 2, 6):
    EXPECT_PARAMS_IN_SELECT = False


def test_clickhouse_client_breadcrumbs(sentry_init, capture_events) -> None:
    sentry_init(
        integrations=[ClickhouseDriverIntegration()],
        _experiments={"record_sql_params": True},
    )
    events = capture_events()

    client = Client("localhost")
    client.execute("DROP TABLE IF EXISTS test")
    client.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
    client.execute("INSERT INTO test (x) VALUES", [{"x": 100}])
    client.execute("INSERT INTO test (x) VALUES", [[170], [200]])

    res = client.execute("SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150})
    assert res[0][0] == 370

    capture_message("hi")

    (event,) = events

    expected_breadcrumbs = [
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "message": "DROP TABLE IF EXISTS test",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "message": "CREATE TABLE test (x Int32) ENGINE = Memory",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "message": "INSERT INTO test (x) VALUES",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "message": "INSERT INTO test (x) VALUES",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "message": "SELECT sum(x) FROM test WHERE x > 150",
            "type": "default",
        },
    ]

    if not EXPECT_PARAMS_IN_SELECT:
        expected_breadcrumbs[-1]["data"].pop("db.params", None)

    for crumb in expected_breadcrumbs:
        crumb["data"] = ApproxDict(crumb["data"])

    for crumb in event["breadcrumbs"]["values"]:
        crumb.pop("timestamp", None)

    assert event["breadcrumbs"]["values"] == expected_breadcrumbs


def test_clickhouse_client_breadcrumbs_with_pii(sentry_init, capture_events) -> None:
    sentry_init(
        integrations=[ClickhouseDriverIntegration()],
        send_default_pii=True,
        _experiments={"record_sql_params": True},
    )
    events = capture_events()

    client = Client("localhost")
    client.execute("DROP TABLE IF EXISTS test")
    client.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
    client.execute("INSERT INTO test (x) VALUES", [{"x": 100}])
    client.execute("INSERT INTO test (x) VALUES", [[170], [200]])

    res = client.execute("SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150})
    assert res[0][0] == 370

    capture_message("hi")

    (event,) = events

    expected_breadcrumbs = [
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.result": [],
            },
            "message": "DROP TABLE IF EXISTS test",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.result": [],
            },
            "message": "CREATE TABLE test (x Int32) ENGINE = Memory",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.params": [{"x": 100}],
            },
            "message": "INSERT INTO test (x) VALUES",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.params": [[170], [200]],
            },
            "message": "INSERT INTO test (x) VALUES",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.result": [[370]],
                "db.params": {"minv": 150},
            },
            "message": "SELECT sum(x) FROM test WHERE x > 150",
            "type": "default",
        },
    ]

    if not EXPECT_PARAMS_IN_SELECT:
        expected_breadcrumbs[-1]["data"].pop("db.params", None)

    for crumb in expected_breadcrumbs:
        crumb["data"] = ApproxDict(crumb["data"])

    for crumb in event["breadcrumbs"]["values"]:
        crumb.pop("timestamp", None)

    assert event["breadcrumbs"]["values"] == expected_breadcrumbs


def test_clickhouse_client_spans(
    sentry_init, capture_events, capture_envelopes
) -> None:
    sentry_init(
        integrations=[ClickhouseDriverIntegration()],
        _experiments={"record_sql_params": True},
        traces_sample_rate=1.0,
    )
    events = capture_events()

    transaction_trace_id = None
    transaction_span_id = None

    with start_transaction(name="test_clickhouse_transaction") as transaction:
        transaction_trace_id = transaction.trace_id
        transaction_span_id = transaction.span_id

        client = Client("localhost")
        client.execute("DROP TABLE IF EXISTS test")
        client.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
        client.execute("INSERT INTO test (x) VALUES", [{"x": 100}])
        client.execute("INSERT INTO test (x) VALUES", [[170], [200]])

        res = client.execute(
            "SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150}
        )
        assert res[0][0] == 370

    (event,) = events

    expected_spans = [
        {
            "op": "db",
            "origin": "auto.db.clickhouse_driver",
            "description": "DROP TABLE IF EXISTS test",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "origin": "auto.db.clickhouse_driver",
            "description": "CREATE TABLE test (x Int32) ENGINE = Memory",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "origin": "auto.db.clickhouse_driver",
            "description": "INSERT INTO test (x) VALUES",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "origin": "auto.db.clickhouse_driver",
            "description": "INSERT INTO test (x) VALUES",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "origin": "auto.db.clickhouse_driver",
            "description": "SELECT sum(x) FROM test WHERE x > 150",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
    ]

    if not EXPECT_PARAMS_IN_SELECT:
        expected_spans[-1]["data"].pop("db.params", None)

    for span in expected_spans:
        span["data"] = ApproxDict(span["data"])

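    # Span ids and timings differ on every run; drop them so only the
    # deterministic fields are compared.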
    for span in event["spans"]:
        span.pop("span_id", None)
        span.pop("start_timestamp", None)
        span.pop("timestamp", None)

    assert event["spans"] == expected_spans


def test_clickhouse_client_spans_with_pii(
    sentry_init, capture_events, capture_envelopes
) -> None:
    sentry_init(
        integrations=[ClickhouseDriverIntegration()],
        _experiments={"record_sql_params": True},
        traces_sample_rate=1.0,
        send_default_pii=True,
    )
    events = capture_events()

    transaction_trace_id = None
    transaction_span_id = None

    with start_transaction(name="test_clickhouse_transaction") as transaction:
        transaction_trace_id = transaction.trace_id
        transaction_span_id = transaction.span_id

        client = Client("localhost")
        client.execute("DROP TABLE IF EXISTS test")
        client.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
        client.execute("INSERT INTO test (x) VALUES", [{"x": 100}])
        client.execute("INSERT INTO test (x) VALUES", [[170], [200]])

        res = client.execute(
            "SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150}
        )
        assert res[0][0] == 370

    (event,) = events

    expected_spans = [
        {
            "op": "db",
            "origin": "auto.db.clickhouse_driver",
            "description": "DROP TABLE IF EXISTS test",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.result": [],
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "origin": "auto.db.clickhouse_driver",
            "description": "CREATE TABLE test (x Int32) ENGINE = Memory",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.result": [],
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "origin": "auto.db.clickhouse_driver",
            "description": "INSERT INTO test (x) VALUES",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.params": [{"x": 100}],
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "origin": "auto.db.clickhouse_driver",
            "description": "INSERT INTO test (x) VALUES",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.params": [[170], [200]],
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "origin": "auto.db.clickhouse_driver",
            "description": "SELECT sum(x) FROM test WHERE x > 150",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.params": {"minv": 150},
                "db.result": [[370]],
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
    ]

    if not EXPECT_PARAMS_IN_SELECT:
        expected_spans[-1]["data"].pop("db.params", None)

    for span in expected_spans:
        span["data"] = ApproxDict(span["data"])

    for span in event["spans"]:
        span.pop("span_id", None)
        span.pop("start_timestamp", None)
        span.pop("timestamp", None)

    assert event["spans"] == expected_spans


def test_clickhouse_dbapi_breadcrumbs(sentry_init, capture_events) -> None:
    sentry_init(
        integrations=[ClickhouseDriverIntegration()],
    )
    events = capture_events()

    conn = connect("clickhouse://localhost")
    cursor = conn.cursor()
    cursor.execute("DROP TABLE IF EXISTS test")
    cursor.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
    cursor.executemany("INSERT INTO test (x) VALUES", [{"x": 100}])
    cursor.executemany("INSERT INTO test (x) VALUES", [[170], [200]])
    cursor.execute("SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150})
    res = cursor.fetchall()

    assert res[0][0] == 370

    capture_message("hi")

    (event,) = events

    expected_breadcrumbs = [
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "message": "DROP TABLE IF EXISTS test",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "message": "CREATE TABLE test (x Int32) ENGINE = Memory",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "message": "INSERT INTO test (x) VALUES",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "message": "INSERT INTO test (x) VALUES",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "message": "SELECT sum(x) FROM test WHERE x > 150",
            "type": "default",
        },
    ]

    if not EXPECT_PARAMS_IN_SELECT:
        expected_breadcrumbs[-1]["data"].pop("db.params", None)

    for crumb in expected_breadcrumbs:
        crumb["data"] = ApproxDict(crumb["data"])

    for crumb in event["breadcrumbs"]["values"]:
        crumb.pop("timestamp", None)

    assert event["breadcrumbs"]["values"] == expected_breadcrumbs


def test_clickhouse_dbapi_breadcrumbs_with_pii(sentry_init, capture_events) -> None:
    sentry_init(
        integrations=[ClickhouseDriverIntegration()],
        send_default_pii=True,
    )
    events = capture_events()

    conn = connect("clickhouse://localhost")
    cursor = conn.cursor()
    cursor.execute("DROP TABLE IF EXISTS test")
    cursor.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
    cursor.executemany("INSERT INTO test (x) VALUES", [{"x": 100}])
    cursor.executemany("INSERT INTO test (x) VALUES", [[170], [200]])
    cursor.execute("SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150})
    res = cursor.fetchall()

    assert res[0][0] == 370

    capture_message("hi")

    (event,) = events

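    # Note: breadcrumb payloads are serialized before being attached, which is
    # presumably why the DB API results below appear as repr() strings (e.g.
    # "'sum(x)'"), unlike the raw values kept in span data.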
    expected_breadcrumbs = [
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.result": [[], []],
            },
            "message": "DROP TABLE IF EXISTS test",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.result": [[], []],
            },
            "message": "CREATE TABLE test (x Int32) ENGINE = Memory",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.params": [{"x": 100}],
            },
            "message": "INSERT INTO test (x) VALUES",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.params": [[170], [200]],
            },
            "message": "INSERT INTO test (x) VALUES",
            "type": "default",
        },
        {
            "category": "query",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.params": {"minv": 150},
                "db.result": [[["370"]], [["'sum(x)'", "'Int64'"]]],
            },
            "message": "SELECT sum(x) FROM test WHERE x > 150",
            "type": "default",
        },
    ]

    if not EXPECT_PARAMS_IN_SELECT:
        expected_breadcrumbs[-1]["data"].pop("db.params", None)

    for crumb in expected_breadcrumbs:
        crumb["data"] = ApproxDict(crumb["data"])

    for crumb in event["breadcrumbs"]["values"]:
        crumb.pop("timestamp", None)

    assert event["breadcrumbs"]["values"] == expected_breadcrumbs


def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) -> None:
    sentry_init(
        integrations=[ClickhouseDriverIntegration()],
        _experiments={"record_sql_params": True},
        traces_sample_rate=1.0,
    )
    events = capture_events()

    transaction_trace_id = None
    transaction_span_id = None

    with start_transaction(name="test_clickhouse_transaction") as transaction:
        transaction_trace_id = transaction.trace_id
        transaction_span_id = transaction.span_id

        conn = connect("clickhouse://localhost")
        cursor = conn.cursor()
        cursor.execute("DROP TABLE IF EXISTS test")
        cursor.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
        cursor.executemany("INSERT INTO test (x) VALUES", [{"x": 100}])
        cursor.executemany("INSERT INTO test (x) VALUES", [[170], [200]])
        cursor.execute("SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150})
        res = cursor.fetchall()

        assert res[0][0] == 370

    (event,) = events

    expected_spans = [
        {
            "op": "db",
            "origin": "auto.db.clickhouse_driver",
            "description": "DROP TABLE IF EXISTS test",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "origin": "auto.db.clickhouse_driver",
            "description": "CREATE TABLE test (x Int32) ENGINE = Memory",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "origin": "auto.db.clickhouse_driver",
            "description": "INSERT INTO test (x) VALUES",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "origin": "auto.db.clickhouse_driver",
            "description": "INSERT INTO test (x) VALUES",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "origin": "auto.db.clickhouse_driver",
            "description": "SELECT sum(x) FROM test WHERE x > 150",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
    ]

    if not EXPECT_PARAMS_IN_SELECT:
        expected_spans[-1]["data"].pop("db.params", None)

    for span in expected_spans:
        span["data"] = ApproxDict(span["data"])

    for span in event["spans"]:
        span.pop("span_id", None)
        span.pop("start_timestamp", None)
        span.pop("timestamp", None)

    assert event["spans"] == expected_spans


def test_clickhouse_dbapi_spans_with_pii(
    sentry_init, capture_events, capture_envelopes
) -> None:
    sentry_init(
        integrations=[ClickhouseDriverIntegration()],
        _experiments={"record_sql_params": True},
        traces_sample_rate=1.0,
        send_default_pii=True,
    )
    events = capture_events()

    transaction_trace_id = None
    transaction_span_id = None

    with start_transaction(name="test_clickhouse_transaction") as transaction:
        transaction_trace_id = transaction.trace_id
        transaction_span_id = transaction.span_id

        conn = connect("clickhouse://localhost")
        cursor = conn.cursor()
        cursor.execute("DROP TABLE IF EXISTS test")
        cursor.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
        cursor.executemany("INSERT INTO test (x) VALUES", [{"x": 100}])
        cursor.executemany("INSERT INTO test (x) VALUES", [[170], [200]])
        cursor.execute("SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150})
        res = cursor.fetchall()

        assert res[0][0] == 370

    (event,) = events

    expected_spans = [
        {
            "op": "db",
            "origin": "auto.db.clickhouse_driver",
            "description": "DROP TABLE IF EXISTS test",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.result": [[], []],
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "origin": "auto.db.clickhouse_driver",
            "description": "CREATE TABLE test (x Int32) ENGINE = Memory",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.result": [[], []],
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "origin": "auto.db.clickhouse_driver",
            "description": "INSERT INTO test (x) VALUES",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.params": [{"x": 100}],
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "origin": "auto.db.clickhouse_driver",
            "description": "INSERT INTO test (x) VALUES",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.params": [[170], [200]],
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
        {
            "op": "db",
            "origin": "auto.db.clickhouse_driver",
            "description": "SELECT sum(x) FROM test WHERE x > 150",
            "data": {
                "db.system": "clickhouse",
                "db.name": "",
                "db.user": "default",
                "server.address": "localhost",
                "server.port": 9000,
                "db.params": {"minv": 150},
                "db.result": [[[370]], [["sum(x)", "Int64"]]],
            },
            "same_process_as_parent": True,
            "trace_id": transaction_trace_id,
            "parent_span_id": transaction_span_id,
        },
    ]

    if not EXPECT_PARAMS_IN_SELECT:
        expected_spans[-1]["data"].pop("db.params", None)

    for span in expected_spans:
        span["data"] = ApproxDict(span["data"])

    for span in event["spans"]:
        span.pop("span_id", None)
        span.pop("start_timestamp", None)
        span.pop("timestamp", None)

    assert event["spans"] == expected_spans


def test_span_origin(sentry_init, capture_events, capture_envelopes) -> None:
    sentry_init(
        integrations=[ClickhouseDriverIntegration()],
        traces_sample_rate=1.0,
    )

    events = capture_events()

    with start_transaction(name="test_clickhouse_transaction"):
        conn = connect("clickhouse://localhost")
        cursor = conn.cursor()
        cursor.execute("SELECT 1")

    (event,) = events

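    # The transaction was started manually, while the span was created by the
    # integration, hence the different origins.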
    assert event["contexts"]["trace"]["origin"] == "manual"
    assert event["spans"][0]["origin"] == "auto.db.clickhouse_driver"
sentry-python-2.18.0/tests/integrations/cloud_resource_context/000077500000000000000000000000001471214654000251025ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/cloud_resource_context/__init__.py000066400000000000000000000000001471214654000272010ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/cloud_resource_context/test_cloud_resource_context.py000066400000000000000000000315261471214654000333030ustar00rootroot00000000000000import json
from unittest import mock
from unittest.mock import MagicMock

import pytest

from sentry_sdk.integrations.cloud_resource_context import (
    CLOUD_PLATFORM,
    CLOUD_PROVIDER,
)

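# Representative instance-identity document as served by the EC2 Instance
# Metadata Service (IMDSv2); the values are fabricated test data.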
AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD = {
    "accountId": "298817902971",
    "architecture": "x86_64",
    "availabilityZone": "us-east-1b",
    "billingProducts": None,
    "devpayProductCodes": None,
    "marketplaceProductCodes": None,
    "imageId": "ami-00874d747dde344fa",
    "instanceId": "i-07d3301297fe0a55a",
    "instanceType": "t2.small",
    "kernelId": None,
    "pendingTime": "2023-02-08T07:54:05Z",
    "privateIp": "171.131.65.115",
    "ramdiskId": None,
    "region": "us-east-1",
    "version": "2017-09-30",
}


AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD_BYTES = bytes(
    json.dumps(AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD), "utf-8"
)


GCP_GCE_EXAMPLE_METADATA_PAYLOAD = {
    "instance": {
        "attributes": {},
        "cpuPlatform": "Intel Broadwell",
        "description": "",
        "disks": [
            {
                "deviceName": "tests-cloud-contexts-in-python-sdk",
                "index": 0,
                "interface": "SCSI",
                "mode": "READ_WRITE",
                "type": "PERSISTENT-BALANCED",
            }
        ],
        "guestAttributes": {},
        "hostname": "tests-cloud-contexts-in-python-sdk.c.client-infra-internal.internal",
        "id": 1535324527892303790,
        "image": "projects/debian-cloud/global/images/debian-11-bullseye-v20221206",
        "licenses": [{"id": "2853224013536823851"}],
        "machineType": "projects/542054129475/machineTypes/e2-medium",
        "maintenanceEvent": "NONE",
        "name": "tests-cloud-contexts-in-python-sdk",
        "networkInterfaces": [
            {
                "accessConfigs": [
                    {"externalIp": "134.30.53.15", "type": "ONE_TO_ONE_NAT"}
                ],
                "dnsServers": ["169.254.169.254"],
                "forwardedIps": [],
                "gateway": "10.188.0.1",
                "ip": "10.188.0.3",
                "ipAliases": [],
                "mac": "42:01:0c:7c:00:13",
                "mtu": 1460,
                "network": "projects/544954029479/networks/default",
                "subnetmask": "255.255.240.0",
                "targetInstanceIps": [],
            }
        ],
        "preempted": "FALSE",
        "remainingCpuTime": -1,
        "scheduling": {
            "automaticRestart": "TRUE",
            "onHostMaintenance": "MIGRATE",
            "preemptible": "FALSE",
        },
        "serviceAccounts": {},
        "tags": ["http-server", "https-server"],
        "virtualClock": {"driftToken": "0"},
        "zone": "projects/142954069479/zones/northamerica-northeast2-b",
    },
    "oslogin": {"authenticate": {"sessions": {}}},
    "project": {
        "attributes": {},
        "numericProjectId": 204954049439,
        "projectId": "my-project-internal",
    },
}

GCP_GCE_EXAMPLE_METADATA_PAYLOAD_BYTES = bytes(
    json.dumps(GCP_GCE_EXAMPLE_METADATA_PAYLOAD), "utf-8"
)


def test_is_aws_http_error():
    from sentry_sdk.integrations.cloud_resource_context import (
        CloudResourceContextIntegration,
    )

    response = MagicMock()
    response.status = 405

    CloudResourceContextIntegration.http = MagicMock()
    CloudResourceContextIntegration.http.request = MagicMock(return_value=response)

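    # A non-200 response to the metadata probe (presumably the IMDSv2 token
    # request) means we are not running on AWS.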
    assert CloudResourceContextIntegration._is_aws() is False
    assert CloudResourceContextIntegration.aws_token == ""


def test_is_aws_ok():
    from sentry_sdk.integrations.cloud_resource_context import (
        CloudResourceContextIntegration,
    )

    response = MagicMock()
    response.status = 200
    response.data = b"something"
    CloudResourceContextIntegration.http = MagicMock()
    CloudResourceContextIntegration.http.request = MagicMock(return_value=response)

    assert CloudResourceContextIntegration._is_aws() is True
    assert CloudResourceContextIntegration.aws_token == "something"

    CloudResourceContextIntegration.http.request = MagicMock(
        side_effect=Exception("Test")
    )
    assert CloudResourceContextIntegration._is_aws() is False


def test_is_aws_exception():
    from sentry_sdk.integrations.cloud_resource_context import (
        CloudResourceContextIntegration,
    )

    CloudResourceContextIntegration.http = MagicMock()
    CloudResourceContextIntegration.http.request = MagicMock(
        side_effect=Exception("Test")
    )

    assert CloudResourceContextIntegration._is_aws() is False


@pytest.mark.parametrize(
    "http_status, response_data, expected_context",
    [
        [
            405,
            b"",
            {
                "cloud.provider": CLOUD_PROVIDER.AWS,
                "cloud.platform": CLOUD_PLATFORM.AWS_EC2,
            },
        ],
        [
            200,
            b"something-but-not-json",
            {
                "cloud.provider": CLOUD_PROVIDER.AWS,
                "cloud.platform": CLOUD_PLATFORM.AWS_EC2,
            },
        ],
        [
            200,
            AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD_BYTES,
            {
                "cloud.provider": "aws",
                "cloud.platform": "aws_ec2",
                "cloud.account.id": "298817902971",
                "cloud.availability_zone": "us-east-1b",
                "cloud.region": "us-east-1",
                "host.id": "i-07d3301297fe0a55a",
                "host.type": "t2.small",
            },
        ],
    ],
)
def test_get_aws_context(http_status, response_data, expected_context):
    from sentry_sdk.integrations.cloud_resource_context import (
        CloudResourceContextIntegration,
    )

    response = MagicMock()
    response.status = http_status
    response.data = response_data

    CloudResourceContextIntegration.http = MagicMock()
    CloudResourceContextIntegration.http.request = MagicMock(return_value=response)

    assert CloudResourceContextIntegration._get_aws_context() == expected_context


def test_is_gcp_http_error():
    from sentry_sdk.integrations.cloud_resource_context import (
        CloudResourceContextIntegration,
    )

    response = MagicMock()
    response.status = 405
    response.data = b'{"some": "json"}'
    CloudResourceContextIntegration.http = MagicMock()
    CloudResourceContextIntegration.http.request = MagicMock(return_value=response)

    assert CloudResourceContextIntegration._is_gcp() is False
    assert CloudResourceContextIntegration.gcp_metadata is None


def test_is_gcp_ok():
    from sentry_sdk.integrations.cloud_resource_context import (
        CloudResourceContextIntegration,
    )

    response = MagicMock()
    response.status = 200
    response.data = b'{"some": "json"}'
    CloudResourceContextIntegration.http = MagicMock()
    CloudResourceContextIntegration.http.request = MagicMock(return_value=response)

    assert CloudResourceContextIntegration._is_gcp() is True
    assert CloudResourceContextIntegration.gcp_metadata == {"some": "json"}


def test_is_gcp_exception():
    from sentry_sdk.integrations.cloud_resource_context import (
        CloudResourceContextIntegration,
    )

    CloudResourceContextIntegration.http = MagicMock()
    CloudResourceContextIntegration.http.request = MagicMock(
        side_effect=Exception("Test")
    )
    assert CloudResourceContextIntegration._is_gcp() is False


@pytest.mark.parametrize(
    "http_status, response_data, expected_context",
    [
        [
            405,
            None,
            {
                "cloud.provider": CLOUD_PROVIDER.GCP,
                "cloud.platform": CLOUD_PLATFORM.GCP_COMPUTE_ENGINE,
            },
        ],
        [
            200,
            b"something-but-not-json",
            {
                "cloud.provider": CLOUD_PROVIDER.GCP,
                "cloud.platform": CLOUD_PLATFORM.GCP_COMPUTE_ENGINE,
            },
        ],
        [
            200,
            GCP_GCE_EXAMPLE_METADATA_PAYLOAD_BYTES,
            {
                "cloud.provider": "gcp",
                "cloud.platform": "gcp_compute_engine",
                "cloud.account.id": "my-project-internal",
                "cloud.availability_zone": "northamerica-northeast2-b",
                "host.id": 1535324527892303790,
            },
        ],
    ],
)
def test_get_gcp_context(http_status, response_data, expected_context):
    from sentry_sdk.integrations.cloud_resource_context import (
        CloudResourceContextIntegration,
    )

    CloudResourceContextIntegration.gcp_metadata = None

    response = MagicMock()
    response.status = http_status
    response.data = response_data

    CloudResourceContextIntegration.http = MagicMock()
    CloudResourceContextIntegration.http.request = MagicMock(return_value=response)

    assert CloudResourceContextIntegration._get_gcp_context() == expected_context


@pytest.mark.parametrize(
    "is_aws, is_gcp, expected_provider",
    [
        [False, False, ""],
        [False, True, CLOUD_PROVIDER.GCP],
        [True, False, CLOUD_PROVIDER.AWS],
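        # When both probes succeed, AWS wins, presumably because it is probed first.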
        [True, True, CLOUD_PROVIDER.AWS],
    ],
)
def test_get_cloud_provider(is_aws, is_gcp, expected_provider):
    from sentry_sdk.integrations.cloud_resource_context import (
        CloudResourceContextIntegration,
    )

    CloudResourceContextIntegration._is_aws = MagicMock(return_value=is_aws)
    CloudResourceContextIntegration._is_gcp = MagicMock(return_value=is_gcp)

    assert CloudResourceContextIntegration._get_cloud_provider() == expected_provider


@pytest.mark.parametrize(
    "cloud_provider",
    [
        CLOUD_PROVIDER.ALIBABA,
        CLOUD_PROVIDER.AZURE,
        CLOUD_PROVIDER.IBM,
        CLOUD_PROVIDER.TENCENT,
    ],
)
def test_get_cloud_resource_context_unsupported_providers(cloud_provider):
    from sentry_sdk.integrations.cloud_resource_context import (
        CloudResourceContextIntegration,
    )

    CloudResourceContextIntegration._get_cloud_provider = MagicMock(
        return_value=cloud_provider
    )

    assert CloudResourceContextIntegration._get_cloud_resource_context() == {}


@pytest.mark.parametrize(
    "cloud_provider",
    [
        CLOUD_PROVIDER.AWS,
        CLOUD_PROVIDER.GCP,
    ],
)
def test_get_cloud_resource_context_supported_providers(cloud_provider):
    from sentry_sdk.integrations.cloud_resource_context import (
        CloudResourceContextIntegration,
    )

    CloudResourceContextIntegration._get_cloud_provider = MagicMock(
        return_value=cloud_provider
    )

    assert CloudResourceContextIntegration._get_cloud_resource_context() != {}


@pytest.mark.parametrize(
    "cloud_provider, cloud_resource_context, warning_called, set_context_called",
    [
        ["", {}, False, False],
        [CLOUD_PROVIDER.AWS, {}, False, False],
        [CLOUD_PROVIDER.GCP, {}, False, False],
        [CLOUD_PROVIDER.AZURE, {}, True, False],
        [CLOUD_PROVIDER.ALIBABA, {}, True, False],
        [CLOUD_PROVIDER.IBM, {}, True, False],
        [CLOUD_PROVIDER.TENCENT, {}, True, False],
        ["", {"some": "context"}, False, True],
        [CLOUD_PROVIDER.AWS, {"some": "context"}, False, True],
        [CLOUD_PROVIDER.GCP, {"some": "context"}, False, True],
    ],
)
def test_setup_once(
    cloud_provider, cloud_resource_context, warning_called, set_context_called
):
    from sentry_sdk.integrations.cloud_resource_context import (
        CloudResourceContextIntegration,
    )

    CloudResourceContextIntegration.cloud_provider = cloud_provider
    CloudResourceContextIntegration._get_cloud_resource_context = MagicMock(
        return_value=cloud_resource_context
    )

    with mock.patch(
        "sentry_sdk.integrations.cloud_resource_context.set_context"
    ) as fake_set_context:
        with mock.patch(
            "sentry_sdk.integrations.cloud_resource_context.logger.warning"
        ) as fake_warning:
            CloudResourceContextIntegration.setup_once()

            if set_context_called:
                fake_set_context.assert_called_once_with(
                    "cloud_resource", cloud_resource_context
                )
            else:
                fake_set_context.assert_not_called()

            def invalid_value_warning_calls():
                """
                Iterator that yields True if the warning was called with the expected message.
                Written as a generator function, rather than a list comprehension, to allow
                us to handle exceptions that might be raised during the iteration if the
                warning call was not as expected.
                """
                for call in fake_warning.call_args_list:
                    try:
                        yield call[0][0].startswith("Invalid value for cloud_provider:")
                    except (IndexError, KeyError, TypeError, AttributeError):
                        ...

            assert warning_called == any(invalid_value_warning_calls())
sentry-python-2.18.0/tests/integrations/cohere/000077500000000000000000000000001471214654000215665ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/cohere/__init__.py000066400000000000000000000000551471214654000236770ustar00rootroot00000000000000import pytest

pytest.importorskip("cohere")
sentry-python-2.18.0/tests/integrations/cohere/test_cohere.py000066400000000000000000000211331471214654000244440ustar00rootroot00000000000000import json

import httpx
import pytest
from cohere import Client, ChatMessage

from sentry_sdk import start_transaction
from sentry_sdk.integrations.cohere import CohereIntegration

from unittest import mock
from httpx import Client as HTTPXClient


@pytest.mark.parametrize(
    "send_default_pii, include_prompts",
    [(True, True), (True, False), (False, True), (False, False)],
)
def test_nonstreaming_chat(
    sentry_init, capture_events, send_default_pii, include_prompts
):
    sentry_init(
        integrations=[CohereIntegration(include_prompts=include_prompts)],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    client = Client(api_key="z")
    HTTPXClient.request = mock.Mock(
        return_value=httpx.Response(
            200,
            json={
                "text": "the model response",
                "meta": {
                    "billed_units": {
                        "output_tokens": 10,
                        "input_tokens": 20,
                    }
                },
            },
        )
    )

    with start_transaction(name="cohere tx"):
        response = client.chat(
            model="some-model",
            chat_history=[ChatMessage(role="SYSTEM", message="some context")],
            message="hello",
        ).text

    assert response == "the model response"
    tx = events[0]
    assert tx["type"] == "transaction"
    span = tx["spans"][0]
    assert span["op"] == "ai.chat_completions.create.cohere"
    assert span["data"]["ai.model_id"] == "some-model"

    if send_default_pii and include_prompts:
        assert "some context" in span["data"]["ai.input_messages"][0]["content"]
        assert "hello" in span["data"]["ai.input_messages"][1]["content"]
        assert "the model response" in span["data"]["ai.responses"]
    else:
        assert "ai.input_messages" not in span["data"]
        assert "ai.responses" not in span["data"]

    assert span["measurements"]["ai_completion_tokens_used"]["value"] == 10
    assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 20
    assert span["measurements"]["ai_total_tokens_used"]["value"] == 30


# noinspection PyTypeChecker
@pytest.mark.parametrize(
    "send_default_pii, include_prompts",
    [(True, True), (True, False), (False, True), (False, False)],
)
def test_streaming_chat(sentry_init, capture_events, send_default_pii, include_prompts):
    sentry_init(
        integrations=[CohereIntegration(include_prompts=include_prompts)],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    client = Client(api_key="z")
    HTTPXClient.send = mock.Mock(
        return_value=httpx.Response(
            200,
            content="\n".join(
                [
                    json.dumps({"event_type": "text-generation", "text": "the model "}),
                    json.dumps({"event_type": "text-generation", "text": "response"}),
                    json.dumps(
                        {
                            "event_type": "stream-end",
                            "finish_reason": "COMPLETE",
                            "response": {
                                "text": "the model response",
                                "meta": {
                                    "billed_units": {
                                        "output_tokens": 10,
                                        "input_tokens": 20,
                                    }
                                },
                            },
                        }
                    ),
                ]
            ),
        )
    )

    with start_transaction(name="cohere tx"):
        responses = list(
            client.chat_stream(
                model="some-model",
                chat_history=[ChatMessage(role="SYSTEM", message="some context")],
                message="hello",
            )
        )
        response_string = responses[-1].response.text

    assert response_string == "the model response"
    tx = events[0]
    assert tx["type"] == "transaction"
    span = tx["spans"][0]
    assert span["op"] == "ai.chat_completions.create.cohere"
    assert span["data"]["ai.model_id"] == "some-model"

    if send_default_pii and include_prompts:
        assert "some context" in span["data"]["ai.input_messages"][0]["content"]
        assert "hello" in span["data"]["ai.input_messages"][1]["content"]
        assert "the model response" in span["data"]["ai.responses"]
    else:
        assert "ai.input_messages" not in span["data"]
        assert "ai.responses" not in span["data"]

    assert span["measurements"]["ai_completion_tokens_used"]["value"] == 10
    assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 20
    assert span["measurements"]["ai_total_tokens_used"]["value"] == 30


def test_bad_chat(sentry_init, capture_events):
    sentry_init(integrations=[CohereIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    client = Client(api_key="z")
    HTTPXClient.request = mock.Mock(
        side_effect=httpx.HTTPError("API rate limit reached")
    )
    with pytest.raises(httpx.HTTPError):
        client.chat(model="some-model", message="hello")

    (event,) = events
    assert event["level"] == "error"


@pytest.mark.parametrize(
    "send_default_pii, include_prompts",
    [(True, True), (True, False), (False, True), (False, False)],
)
def test_embed(sentry_init, capture_events, send_default_pii, include_prompts):
    sentry_init(
        integrations=[CohereIntegration(include_prompts=include_prompts)],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    client = Client(api_key="z")
    HTTPXClient.request = mock.Mock(
        return_value=httpx.Response(
            200,
            json={
                "response_type": "embeddings_floats",
                "id": "1",
                "texts": ["hello"],
                "embeddings": [[1.0, 2.0, 3.0]],
                "meta": {
                    "billed_units": {
                        "input_tokens": 10,
                    }
                },
            },
        )
    )

    with start_transaction(name="cohere tx"):
        response = client.embed(texts=["hello"], model="text-embedding-3-large")

    assert len(response.embeddings[0]) == 3

    tx = events[0]
    assert tx["type"] == "transaction"
    span = tx["spans"][0]
    assert span["op"] == "ai.embeddings.create.cohere"
    if send_default_pii and include_prompts:
        assert "hello" in span["data"]["ai.input_messages"]
    else:
        assert "ai.input_messages" not in span["data"]

    assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 10
    assert span["measurements"]["ai_total_tokens_used"]["value"] == 10


def test_span_origin_chat(sentry_init, capture_events):
    sentry_init(
        integrations=[CohereIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client = Client(api_key="z")
    HTTPXClient.request = mock.Mock(
        return_value=httpx.Response(
            200,
            json={
                "text": "the model response",
                "meta": {
                    "billed_units": {
                        "output_tokens": 10,
                        "input_tokens": 20,
                    }
                },
            },
        )
    )

    with start_transaction(name="cohere tx"):
        client.chat(
            model="some-model",
            chat_history=[ChatMessage(role="SYSTEM", message="some context")],
            message="hello",
        ).text

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"
    assert event["spans"][0]["origin"] == "auto.ai.cohere"


def test_span_origin_embed(sentry_init, capture_events):
    sentry_init(
        integrations=[CohereIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client = Client(api_key="z")
    HTTPXClient.request = mock.Mock(
        return_value=httpx.Response(
            200,
            json={
                "response_type": "embeddings_floats",
                "id": "1",
                "texts": ["hello"],
                "embeddings": [[1.0, 2.0, 3.0]],
                "meta": {
                    "billed_units": {
                        "input_tokens": 10,
                    }
                },
            },
        )
    )

    with start_transaction(name="cohere tx"):
        client.embed(texts=["hello"], model="text-embedding-3-large")

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"
    assert event["spans"][0]["origin"] == "auto.ai.cohere"
sentry-python-2.18.0/tests/integrations/conftest.py000066400000000000000000000022061471214654000225200ustar00rootroot00000000000000import pytest
import sentry_sdk


@pytest.fixture
def capture_exceptions(monkeypatch):
    def inner():
        errors = set()
        old_capture_event_hub = sentry_sdk.Hub.capture_event
        old_capture_event_scope = sentry_sdk.Scope.capture_event

        def capture_event_hub(self, event, hint=None, scope=None):
            """
            Can be removed when we remove push_scope and the Hub from the SDK.
            """
            if hint:
                if "exc_info" in hint:
                    error = hint["exc_info"][1]
                    errors.add(error)
            return old_capture_event_hub(self, event, hint=hint, scope=scope)

        def capture_event_scope(self, event, hint=None, scope=None):
            if hint:
                if "exc_info" in hint:
                    error = hint["exc_info"][1]
                    errors.add(error)
            return old_capture_event_scope(self, event, hint=hint, scope=scope)

        monkeypatch.setattr(sentry_sdk.Hub, "capture_event", capture_event_hub)
        monkeypatch.setattr(sentry_sdk.Scope, "capture_event", capture_event_scope)

        return errors

    return inner
sentry-python-2.18.0/tests/integrations/django/000077500000000000000000000000001471214654000215635ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/django/__init__.py000066400000000000000000000004341471214654000236750ustar00rootroot00000000000000import os
import sys
import pytest

pytest.importorskip("django")

# Load `django_helpers` into the module search path to test query source path names relative to module. See
# `test_query_source_with_module_in_search_path`
sys.path.insert(0, os.path.join(os.path.dirname(__file__)))
sentry-python-2.18.0/tests/integrations/django/asgi/000077500000000000000000000000001471214654000225065ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/django/asgi/__init__.py000066400000000000000000000000571471214654000246210ustar00rootroot00000000000000import pytest

pytest.importorskip("channels")
sentry-python-2.18.0/tests/integrations/django/asgi/image.png000066400000000000000000000004641471214654000243020ustar00rootroot00000000000000PNG


IHDR
	IDATWcHsWT,pƃϟ+e+FQ0}^-//CfR3
VWhgV׵d2ܺlzjVB!H#SM/;'15e0H6$[72iȃM32bXd;PS1KJ04`H2fÌ5b.rfO_`4;PלfŘ
M
fh@ 4x8LIENDB`sentry-python-2.18.0/tests/integrations/django/asgi/test_asgi.py000066400000000000000000000477751471214654000250660ustar00rootroot00000000000000import base64
import sys
import json
import inspect
import asyncio
import os
from unittest import mock

import django
import pytest
from channels.testing import HttpCommunicator
from sentry_sdk import capture_message
from sentry_sdk.integrations.django import DjangoIntegration
from sentry_sdk.integrations.django.asgi import _asgi_middleware_mixin_factory
from tests.integrations.django.myapp.asgi import channels_application

try:
    from django.urls import reverse
except ImportError:
    from django.core.urlresolvers import reverse


APPS = [channels_application]
if django.VERSION >= (3, 0):
    from tests.integrations.django.myapp.asgi import asgi_application

    APPS += [asgi_application]


@pytest.mark.parametrize("application", APPS)
@pytest.mark.asyncio
@pytest.mark.forked
async def test_basic(sentry_init, capture_events, application):
    sentry_init(
        integrations=[DjangoIntegration()],
        send_default_pii=True,
    )

    events = capture_events()

    comm = HttpCommunicator(application, "GET", "/view-exc?test=query")
    response = await comm.get_response()
    await comm.wait()

    assert response["status"] == 500

    (event,) = events

    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"

    # Test that the ASGI middleware got set up correctly. Right now this needs
    # to be installed manually (see myapp/asgi.py)
    assert event["transaction"] == "/view-exc"
    assert event["request"] == {
        "cookies": {},
        "headers": {},
        "method": "GET",
        "query_string": "test=query",
        "url": "/view-exc",
    }

    capture_message("hi")
    event = events[-1]
    assert "request" not in event


@pytest.mark.parametrize("application", APPS)
@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_async_views(sentry_init, capture_events, application):
    sentry_init(
        integrations=[DjangoIntegration()],
        send_default_pii=True,
    )

    events = capture_events()

    comm = HttpCommunicator(application, "GET", "/async_message")
    response = await comm.get_response()
    await comm.wait()

    assert response["status"] == 200

    (event,) = events

    assert event["transaction"] == "/async_message"
    assert event["request"] == {
        "cookies": {},
        "headers": {},
        "method": "GET",
        "query_string": None,
        "url": "/async_message",
    }


@pytest.mark.parametrize("application", APPS)
@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_active_thread_id(
    sentry_init, capture_envelopes, teardown_profiling, endpoint, application
):
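    # Force the profiler to keep profiles with any number of samples so this
    # short request still produces one.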
    with mock.patch(
        "sentry_sdk.profiler.transaction_profiler.PROFILE_MINIMUM_SAMPLES", 0
    ):
        sentry_init(
            integrations=[DjangoIntegration()],
            traces_sample_rate=1.0,
            profiles_sample_rate=1.0,
        )

        envelopes = capture_envelopes()

        comm = HttpCommunicator(application, "GET", endpoint)
        response = await comm.get_response()
        await comm.wait()

        assert response["status"] == 200, response["body"]

    assert len(envelopes) == 1

    profiles = [item for item in envelopes[0].items if item.type == "profile"]
    assert len(profiles) == 1

    data = json.loads(response["body"])

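    # The endpoint returns the thread id it ran on; both the profile and the
    # transaction's trace context should report the same active thread.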
    for item in profiles:
        transactions = item.payload.json["transactions"]
        assert len(transactions) == 1
        assert str(data["active"]) == transactions[0]["active_thread_id"]

    transactions = [item for item in envelopes[0].items if item.type == "transaction"]
    assert len(transactions) == 1

    for item in transactions:
        transaction = item.payload.json
        trace_context = transaction["contexts"]["trace"]
        assert str(data["active"]) == trace_context["data"]["thread.id"]


@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_async_views_concurrent_execution(sentry_init, settings):
    import time

    settings.MIDDLEWARE = []
    asgi_application.load_middleware(is_async=True)

    sentry_init(
        integrations=[DjangoIntegration()],
        send_default_pii=True,
    )

    comm = HttpCommunicator(
        asgi_application, "GET", "/my_async_view"
    )  # sleeps for 1 second
    comm2 = HttpCommunicator(
        asgi_application, "GET", "/my_async_view"
    )  # sleeps for 1 second

    loop = asyncio.get_event_loop()

    start = time.time()

    r1 = loop.create_task(comm.get_response(timeout=5))
    r2 = loop.create_task(comm2.get_response(timeout=5))

    (resp1, resp2), _ = await asyncio.wait({r1, r2})

    end = time.time()

    assert resp1.result()["status"] == 200
    assert resp2.result()["status"] == 200

    assert (
        end - start < 2
    )  # it took less than 2 seconds, so the requests were executing concurrently


@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_async_middleware_that_is_function_concurrent_execution(
    sentry_init, settings
):
    import time

    settings.MIDDLEWARE = [
        "tests.integrations.django.myapp.middleware.simple_middleware"
    ]
    asgi_application.load_middleware(is_async=True)

    sentry_init(
        integrations=[DjangoIntegration()],
        send_default_pii=True,
    )

    comm = HttpCommunicator(
        asgi_application, "GET", "/my_async_view"
    )  # sleeps for 1 second
    comm2 = HttpCommunicator(
        asgi_application, "GET", "/my_async_view"
    )  # sleeps for 1 second

    loop = asyncio.get_event_loop()

    start = time.time()

    r1 = loop.create_task(comm.get_response(timeout=5))
    r2 = loop.create_task(comm2.get_response(timeout=5))

    (resp1, resp2), _ = await asyncio.wait({r1, r2})

    end = time.time()

    assert resp1.result()["status"] == 200
    assert resp2.result()["status"] == 200

    assert (
        end - start < 2
    )  # took less than 2 seconds, so the two views were executing concurrently
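
# A standalone sketch of the timing argument the two concurrency tests above
# rely on (illustrative only, not exercised by the suite): two tasks that each
# await asyncio.sleep(1) finish in about one second total when scheduled
# concurrently, which is exactly what the `end - start < 2` assertions check.
async def _concurrency_sketch():
    import asyncio
    import time

    start = time.time()
    await asyncio.gather(asyncio.sleep(1), asyncio.sleep(1))
    return time.time() - start  # ~1.0 when concurrent, ~2.0 if serialized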


@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_async_middleware_spans(
    sentry_init, render_span_tree, capture_events, settings
):
    settings.MIDDLEWARE = [
        "django.contrib.sessions.middleware.SessionMiddleware",
        "django.contrib.auth.middleware.AuthenticationMiddleware",
        "django.middleware.csrf.CsrfViewMiddleware",
        "tests.integrations.django.myapp.settings.TestMiddleware",
    ]
    asgi_application.load_middleware(is_async=True)

    sentry_init(
        integrations=[DjangoIntegration(middleware_spans=True)],
        traces_sample_rate=1.0,
        _experiments={"record_sql_params": True},
    )

    events = capture_events()

    comm = HttpCommunicator(asgi_application, "GET", "/simple_async_view")
    response = await comm.get_response()
    await comm.wait()

    assert response["status"] == 200

    (transaction,) = events

    assert (
        render_span_tree(transaction)
        == """\
- op="http.server": description=null
  - op="event.django": description="django.db.reset_queries"
  - op="event.django": description="django.db.close_old_connections"
  - op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.__acall__"
    - op="middleware.django": description="django.contrib.auth.middleware.AuthenticationMiddleware.__acall__"
      - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.__acall__"
        - op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.__acall__"
          - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
          - op="view.render": description="simple_async_view"
  - op="event.django": description="django.db.close_old_connections"
  - op="event.django": description="django.core.cache.close_caches"
  - op="event.django": description="django.core.handlers.base.reset_urlconf\""""
    )


@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_has_trace_if_performance_enabled(sentry_init, capture_events):
    sentry_init(
        integrations=[DjangoIntegration()],
        traces_sample_rate=1.0,
    )

    events = capture_events()

    comm = HttpCommunicator(asgi_application, "GET", "/view-exc-with-msg")
    response = await comm.get_response()
    await comm.wait()

    assert response["status"] == 500

    (msg_event, error_event, transaction_event) = events

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
        == transaction_event["contexts"]["trace"]["trace_id"]
    )


@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_has_trace_if_performance_disabled(sentry_init, capture_events):
    sentry_init(
        integrations=[DjangoIntegration()],
    )

    events = capture_events()

    comm = HttpCommunicator(asgi_application, "GET", "/view-exc-with-msg")
    response = await comm.get_response()
    await comm.wait()

    assert response["status"] == 500

    (msg_event, error_event) = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]
    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
    )


@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_trace_from_headers_if_performance_enabled(sentry_init, capture_events):
    sentry_init(
        integrations=[DjangoIntegration()],
        traces_sample_rate=1.0,
    )

    events = capture_events()

    trace_id = "582b43a4192642f0b136d5159a501701"
    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)

    comm = HttpCommunicator(
        asgi_application,
        "GET",
        "/view-exc-with-msg",
        headers=[(b"sentry-trace", sentry_trace_header.encode())],
    )
    response = await comm.get_response()
    await comm.wait()

    assert response["status"] == 500

    (msg_event, error_event, transaction_event) = events

    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
    assert transaction_event["contexts"]["trace"]["trace_id"] == trace_id


@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_trace_from_headers_if_performance_disabled(sentry_init, capture_events):
    sentry_init(
        integrations=[DjangoIntegration()],
    )

    events = capture_events()

    trace_id = "582b43a4192642f0b136d5159a501701"
    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)

    comm = HttpCommunicator(
        asgi_application,
        "GET",
        "/view-exc-with-msg",
        headers=[(b"sentry-trace", sentry_trace_header.encode())],
    )
    response = await comm.get_response()
    await comm.wait()

    assert response["status"] == 500

    (msg_event, error_event) = events

    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
    assert error_event["contexts"]["trace"]["trace_id"] == trace_id


PICTURE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "image.png")
BODY_FORM = """--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="username"\r\n\r\nJane\r\n--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="password"\r\n\r\nhello123\r\n--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="photo"; filename="image.png"\r\nContent-Type: image/png\r\nContent-Transfer-Encoding: base64\r\n\r\n{{image_data}}\r\n--fd721ef49ea403a6--\r\n""".replace(
    "{{image_data}}", base64.b64encode(open(PICTURE, "rb").read()).decode("utf-8")
).encode(
    "utf-8"
)
BODY_FORM_CONTENT_LENGTH = str(len(BODY_FORM)).encode("utf-8")
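
# BODY_FORM above is a hand-rolled multipart/form-data payload: each part is
# delimited by the boundary `fd721ef49ea403a6` that the tests also declare in
# the content-type header, and the photo part embeds image.png base64-encoded
# so the binary file survives the str-template/encode round trip.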


@pytest.mark.parametrize("application", APPS)
@pytest.mark.parametrize(
    "send_default_pii,method,headers,url_name,body,expected_data",
    [
        (
            True,
            "POST",
            [(b"content-type", b"text/plain")],
            "post_echo_async",
            b"",
            None,
        ),
        (
            True,
            "POST",
            [(b"content-type", b"text/plain")],
            "post_echo_async",
            b"some raw text body",
            "",
        ),
        (
            True,
            "POST",
            [(b"content-type", b"application/json")],
            "post_echo_async",
            b'{"username":"xyz","password":"xyz"}',
            {"username": "xyz", "password": "[Filtered]"},
        ),
        (
            True,
            "POST",
            [(b"content-type", b"application/xml")],
            "post_echo_async",
            b'<?xml version="1.0" encoding="UTF-8"?><root></root>',
            "",
        ),
        (
            True,
            "POST",
            [
                (b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"),
                (b"content-length", BODY_FORM_CONTENT_LENGTH),
            ],
            "post_echo_async",
            BODY_FORM,
            {"password": "[Filtered]", "photo": "", "username": "Jane"},
        ),
        (
            False,
            "POST",
            [(b"content-type", b"text/plain")],
            "post_echo_async",
            b"",
            None,
        ),
        (
            False,
            "POST",
            [(b"content-type", b"text/plain")],
            "post_echo_async",
            b"some raw text body",
            "",
        ),
        (
            False,
            "POST",
            [(b"content-type", b"application/json")],
            "post_echo_async",
            b'{"username":"xyz","password":"xyz"}',
            {"username": "xyz", "password": "[Filtered]"},
        ),
        (
            False,
            "POST",
            [(b"content-type", b"application/xml")],
            "post_echo_async",
            b'<?xml version="1.0" encoding="UTF-8"?><root></root>',
            "",
        ),
        (
            False,
            "POST",
            [
                (b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"),
                (b"content-length", BODY_FORM_CONTENT_LENGTH),
            ],
            "post_echo_async",
            BODY_FORM,
            {"password": "[Filtered]", "photo": "", "username": "Jane"},
        ),
    ],
)
@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_asgi_request_body(
    sentry_init,
    capture_envelopes,
    application,
    send_default_pii,
    method,
    headers,
    url_name,
    body,
    expected_data,
):
    sentry_init(
        integrations=[DjangoIntegration()],
        send_default_pii=send_default_pii,
    )

    envelopes = capture_envelopes()

    comm = HttpCommunicator(
        application,
        method=method,
        headers=headers,
        path=reverse(url_name),
        body=body,
    )
    response = await comm.get_response()
    await comm.wait()

    assert response["status"] == 200
    assert response["body"] == body

    (envelope,) = envelopes
    event = envelope.get_event()

    if expected_data is not None:
        assert event["request"]["data"] == expected_data
    else:
        assert "data" not in event["request"]


@pytest.mark.asyncio
@pytest.mark.skipif(
    sys.version_info >= (3, 12),
    reason=(
        "asyncio.iscoroutinefunction has been replaced in 3.12 by inspect.iscoroutinefunction"
    ),
)
async def test_asgi_mixin_iscoroutinefunction_before_3_12():
    sentry_asgi_mixin = _asgi_middleware_mixin_factory(lambda: None)

    async def get_response(): ...

    instance = sentry_asgi_mixin(get_response)
    assert asyncio.iscoroutinefunction(instance)


@pytest.mark.skipif(
    sys.version_info >= (3, 12),
    reason=(
        "asyncio.iscoroutinefunction has been replaced in 3.12 by inspect.iscoroutinefunction"
    ),
)
def test_asgi_mixin_iscoroutinefunction_when_not_async_before_3_12():
    sentry_asgi_mixin = _asgi_middleware_mixin_factory(lambda: None)

    def get_response(): ...

    instance = sentry_asgi_mixin(get_response)
    assert not asyncio.iscoroutinefunction(instance)


@pytest.mark.asyncio
@pytest.mark.skipif(
    sys.version_info < (3, 12),
    reason=(
        "asyncio.iscoroutinefunction has been replaced in 3.12 by inspect.iscoroutinefunction"
    ),
)
async def test_asgi_mixin_iscoroutinefunction_after_3_12():
    sentry_asgi_mixin = _asgi_middleware_mixin_factory(lambda: None)

    async def get_response(): ...

    instance = sentry_asgi_mixin(get_response)
    assert inspect.iscoroutinefunction(instance)


@pytest.mark.skipif(
    sys.version_info < (3, 12),
    reason=(
        "asyncio.iscoroutinefunction has been replaced in 3.12 by inspect.iscoroutinefunction"
    ),
)
def test_asgi_mixin_iscoroutinefunction_when_not_async_after_3_12():
    sentry_asgi_mixin = _asgi_middleware_mixin_factory(lambda: None)

    def get_response(): ...

    instance = sentry_asgi_mixin(get_response)
    assert not inspect.iscoroutinefunction(instance)
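
# The four tests above pin down the Python 3.12 change. A version-agnostic
# check might look like this (a sketch; the SDK does not ship this helper):
def _iscoroutinefunction_compat(func):
    if sys.version_info >= (3, 12):
        return inspect.iscoroutinefunction(func)
    return asyncio.iscoroutinefunction(func)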


@pytest.mark.parametrize("application", APPS)
@pytest.mark.asyncio
async def test_async_view(sentry_init, capture_events, application):
    sentry_init(
        integrations=[DjangoIntegration()],
        traces_sample_rate=1.0,
    )

    events = capture_events()

    comm = HttpCommunicator(application, "GET", "/simple_async_view")
    await comm.get_response()
    await comm.wait()

    (event,) = events
    assert event["type"] == "transaction"
    assert event["transaction"] == "/simple_async_view"


@pytest.mark.parametrize("application", APPS)
@pytest.mark.asyncio
async def test_transaction_http_method_default(
    sentry_init, capture_events, application
):
    """
    By default, OPTIONS and HEAD requests do not create a transaction.
    """
    sentry_init(
        integrations=[DjangoIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    comm = HttpCommunicator(application, "GET", "/simple_async_view")
    await comm.get_response()
    await comm.wait()

    comm = HttpCommunicator(application, "OPTIONS", "/simple_async_view")
    await comm.get_response()
    await comm.wait()

    comm = HttpCommunicator(application, "HEAD", "/simple_async_view")
    await comm.get_response()
    await comm.wait()

    (event,) = events

    assert len(events) == 1
    assert event["request"]["method"] == "GET"


@pytest.mark.parametrize("application", APPS)
@pytest.mark.asyncio
async def test_transaction_http_method_custom(sentry_init, capture_events, application):
    sentry_init(
        integrations=[
            DjangoIntegration(
                http_methods_to_capture=(
                    "OPTIONS",
                    "head",
                ),  # capitalization does not matter
            )
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    comm = HttpCommunicator(application, "GET", "/simple_async_view")
    await comm.get_response()
    await comm.wait()

    comm = HttpCommunicator(application, "OPTIONS", "/simple_async_view")
    await comm.get_response()
    await comm.wait()

    comm = HttpCommunicator(application, "HEAD", "/simple_async_view")
    await comm.get_response()
    await comm.wait()

    assert len(events) == 2

    (event1, event2) = events
    assert event1["request"]["method"] == "OPTIONS"
    assert event2["request"]["method"] == "HEAD"

sentry-python-2.18.0/tests/integrations/django/django_helpers/__init__.py
sentry-python-2.18.0/tests/integrations/django/django_helpers/views.py
from django.contrib.auth.models import User
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt


@csrf_exempt
def postgres_select_orm(request, *args, **kwargs):
    user = User.objects.using("postgres").all().first()
    return HttpResponse("ok {}".format(user))

sentry-python-2.18.0/tests/integrations/django/myapp/__init__.py
sentry-python-2.18.0/tests/integrations/django/myapp/asgi.py
"""
ASGI entrypoint. Configures Django and then runs the application
defined in the ASGI_APPLICATION setting.
"""

import os
import django
from channels.routing import get_default_application

os.environ.setdefault(
    "DJANGO_SETTINGS_MODULE", "tests.integrations.django.myapp.settings"
)

django.setup()
channels_application = get_default_application()

if django.VERSION >= (3, 0):
    from django.core.asgi import get_asgi_application

    asgi_application = get_asgi_application()
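
# Two ASGI entry points are exposed deliberately: `channels_application` routes
# HTTP through channels' ProtocolTypeRouter, while `asgi_application` (only on
# Django 3.0+) is the plain Django ASGI handler, so tests can exercise both.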

sentry-python-2.18.0/tests/integrations/django/myapp/custom_urls.py
"""myapp URL Configuration

The `urlpatterns` list routes URLs to views. For more information please see:
    https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
    1. Add an import:  from my_app import views
    2. Add a URL to urlpatterns:  path('', views.home, name='home')
Class-based views
    1. Add an import:  from other_app.views import Home
    2. Add a URL to urlpatterns:  path('', Home.as_view(), name='home')
Including another URLconf
    1. Import the include() function: from django.urls import include, path
    2. Add a URL to urlpatterns:  path('blog/', include('blog.urls'))
"""

try:
    from django.urls import path
except ImportError:
    from django.conf.urls import url

    def path(path, *args, **kwargs):
        return url("^{}$".format(path), *args, **kwargs)


from . import views

urlpatterns = [
    path("custom/ok", views.custom_ok, name="custom_ok"),
    path("custom/exc", views.custom_exc, name="custom_exc"),
]

sentry-python-2.18.0/tests/integrations/django/myapp/manage.py
#!/usr/bin/env python
import os
import sys

if __name__ == "__main__":
    os.environ.setdefault(
        "DJANGO_SETTINGS_MODULE", "tests.integrations.django.myapp.settings"
    )

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)

sentry-python-2.18.0/tests/integrations/django/myapp/management/__init__.py
sentry-python-2.18.0/tests/integrations/django/myapp/management/commands/__init__.py
sentry-python-2.18.0/tests/integrations/django/myapp/management/commands/mycrash.py
from django.core.management.base import BaseCommand


class Command(BaseCommand):
    def add_arguments(self, parser):
        pass

    def handle(self, *args, **options):
        1 / 0
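
# The deliberate ZeroDivisionError gives the tests a management command that
# always crashes, so they can run `mycrash` via execute_from_command_line and
# assert that command failures are reported to Sentry.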

sentry-python-2.18.0/tests/integrations/django/myapp/middleware.py
import django

if django.VERSION >= (3, 1):
    import asyncio
    from django.utils.decorators import sync_and_async_middleware

    @sync_and_async_middleware
    def simple_middleware(get_response):
        if asyncio.iscoroutinefunction(get_response):

            async def middleware(request):
                response = await get_response(request)
                return response

        else:

            def middleware(request):
                response = get_response(request)
                return response

        return middleware


def custom_urlconf_middleware(get_response):
    def middleware(request):
        request.urlconf = "tests.integrations.django.myapp.custom_urls"
        response = get_response(request)
        return response

    return middleware
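
# For reference: @sync_and_async_middleware only marks the factory as both
# sync_capable and async_capable; Django then calls it with a sync or an async
# get_response, and simple_middleware returns the matching wrapper.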

sentry-python-2.18.0/tests/integrations/django/myapp/routing.py
import channels
from channels.routing import ProtocolTypeRouter

try:
    from channels.http import AsgiHandler

    if channels.__version__ < "3.0.0":
        django_asgi_app = AsgiHandler
    else:
        django_asgi_app = AsgiHandler()

except ModuleNotFoundError:
    # Since channels 4.0 ASGI handling is done by Django itself
    from django.core.asgi import get_asgi_application

    django_asgi_app = get_asgi_application()

application = ProtocolTypeRouter({"http": django_asgi_app})

sentry-python-2.18.0/tests/integrations/django/myapp/settings.py
"""
Django settings for myapp project.

Generated by 'django-admin startproject' using Django 2.0.7.

For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/

For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""

# We shouldn't access settings while setting up integrations. Initialize SDK
# here to provoke any errors that might occur.
import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration

sentry_sdk.init(integrations=[DjangoIntegration()])

import os

from django.utils.deprecation import MiddlewareMixin

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "u95e#xr$t3!vdux)fj11!*q*^w^^r#kiyrvt3kjui-t_k%m3op"

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = ["localhost"]


# Application definition

INSTALLED_APPS = [
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
    "tests.integrations.django.myapp",
]


class TestMiddleware(MiddlewareMixin):
    def process_request(self, request):
        # https://github.com/getsentry/sentry-python/issues/837 -- We should
        # not touch the resolver_match because apparently people rely on it.
        if request.resolver_match:
            assert not getattr(request.resolver_match.callback, "__wrapped__", None)

        if "middleware-exc" in request.path:
            1 / 0

    def process_response(self, request, response):
        return response


def TestFunctionMiddleware(get_response):  # noqa: N802
    def middleware(request):
        return get_response(request)

    return middleware


MIDDLEWARE_CLASSES = [
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "tests.integrations.django.myapp.settings.TestMiddleware",
]

if MiddlewareMixin is not object:
    MIDDLEWARE = MIDDLEWARE_CLASSES + [
        "tests.integrations.django.myapp.settings.TestFunctionMiddleware"
    ]


ROOT_URLCONF = "tests.integrations.django.myapp.urls"

TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [],
        "APP_DIRS": True,
        "OPTIONS": {
            "debug": True,
            "context_processors": [
                "django.template.context_processors.debug",
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
            ],
        },
    }
]

WSGI_APPLICATION = "tests.integrations.django.myapp.wsgi.application"


# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases

DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}

try:
    import psycopg2  # noqa

    db_engine = "django.db.backends.postgresql"
    try:
        from django.db.backends import postgresql  # noqa: F401
    except ImportError:
        db_engine = "django.db.backends.postgresql_psycopg2"

    DATABASES["postgres"] = {
        "ENGINE": db_engine,
        "HOST": os.environ.get("SENTRY_PYTHON_TEST_POSTGRES_HOST", "localhost"),
        "PORT": int(os.environ.get("SENTRY_PYTHON_TEST_POSTGRES_PORT", "5432")),
        "USER": os.environ.get("SENTRY_PYTHON_TEST_POSTGRES_USER", "postgres"),
        "PASSWORD": os.environ.get("SENTRY_PYTHON_TEST_POSTGRES_PASSWORD", "sentry"),
        "NAME": os.environ.get(
            "SENTRY_PYTHON_TEST_POSTGRES_NAME", f"myapp_db_{os.getpid()}"
        ),
    }
except (ImportError, KeyError):
    from sentry_sdk.utils import logger

    logger.warning("No psycopg2 found, testing with SQLite.")


# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"
    },
    {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"},
    {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"},
    {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"},
]


# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/

LANGUAGE_CODE = "en-us"

TIME_ZONE = "UTC"

USE_I18N = True

USE_L10N = True

USE_TZ = False

TEMPLATE_DEBUG = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/

STATIC_URL = "/static/"

# django-channels specific
ASGI_APPLICATION = "tests.integrations.django.myapp.routing.application"

sentry-python-2.18.0/tests/integrations/django/myapp/signals.py
from django.core import signals
from django.dispatch import receiver

myapp_custom_signal = signals.Signal()
myapp_custom_signal_silenced = signals.Signal()


@receiver(myapp_custom_signal)
def signal_handler(sender, **kwargs):
    assert sender == "hello"


@receiver(myapp_custom_signal_silenced)
def signal_handler_silenced(sender, **kwargs):
    assert sender == "hello"

sentry-python-2.18.0/tests/integrations/django/myapp/templates/error.html
1
2
3
4
5
6
7
8
9
{% invalid template tag %}
11
12
13
14
15
16
17
18
19
20

sentry-python-2.18.0/tests/integrations/django/myapp/templates/trace_meta.html
{{ sentry_trace_meta }}

sentry-python-2.18.0/tests/integrations/django/myapp/templates/user_name.html
{{ request.user }}: {{ user_age }}

sentry-python-2.18.0/tests/integrations/django/myapp/urls.py
"""myapp URL Configuration

The `urlpatterns` list routes URLs to views. For more information please see:
    https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
    1. Add an import:  from my_app import views
    2. Add a URL to urlpatterns:  path('', views.home, name='home')
Class-based views
    1. Add an import:  from other_app.views import Home
    2. Add a URL to urlpatterns:  path('', Home.as_view(), name='home')
Including another URLconf
    1. Import the include() function: from django.urls import include, path
    2. Add a URL to urlpatterns:  path('blog/', include('blog.urls'))
"""

try:
    from django.urls import path
except ImportError:
    from django.conf.urls import url

    def path(path, *args, **kwargs):
        return url("^{}$".format(path), *args, **kwargs)


from . import views
from django_helpers import views as helper_views

urlpatterns = [
    path("view-exc", views.view_exc, name="view_exc"),
    path("view-exc-with-msg", views.view_exc_with_msg, name="view_exc_with_msg"),
    path("cached-view", views.cached_view, name="cached_view"),
    path("not-cached-view", views.not_cached_view, name="not_cached_view"),
    path(
        "view-with-cached-template-fragment",
        views.view_with_cached_template_fragment,
        name="view_with_cached_template_fragment",
    ),
    path(
        "read-body-and-view-exc",
        views.read_body_and_view_exc,
        name="read_body_and_view_exc",
    ),
    path("middleware-exc", views.message, name="middleware_exc"),
    path("message", views.message, name="message"),
    path("nomessage", views.nomessage, name="nomessage"),
    path("view-with-signal", views.view_with_signal, name="view_with_signal"),
    path("mylogin", views.mylogin, name="mylogin"),
    path("classbased", views.ClassBasedView.as_view(), name="classbased"),
    path("sentryclass", views.SentryClassBasedView(), name="sentryclass"),
    path(
        "sentryclass-csrf",
        views.SentryClassBasedViewWithCsrf(),
        name="sentryclass_csrf",
    ),
    path("post-echo", views.post_echo, name="post_echo"),
    path("template-exc", views.template_exc, name="template_exc"),
    path("template-test", views.template_test, name="template_test"),
    path("template-test2", views.template_test2, name="template_test2"),
    path("template-test3", views.template_test3, name="template_test3"),
    path("postgres-select", views.postgres_select, name="postgres_select"),
    path("postgres-select-slow", views.postgres_select_orm, name="postgres_select_orm"),
    path(
        "postgres-select-slow-from-supplement",
        helper_views.postgres_select_orm,
        name="postgres_select_slow_from_supplement",
    ),
    path(
        "permission-denied-exc",
        views.permission_denied_exc,
        name="permission_denied_exc",
    ),
    path(
        "csrf-hello-not-exempt",
        views.csrf_hello_not_exempt,
        name="csrf_hello_not_exempt",
    ),
    path("sync/thread_ids", views.thread_ids_sync, name="thread_ids_sync"),
    path(
        "send-myapp-custom-signal",
        views.send_myapp_custom_signal,
        name="send_myapp_custom_signal",
    ),
]

# async views
if views.async_message is not None:
    urlpatterns.append(path("async_message", views.async_message, name="async_message"))

if views.my_async_view is not None:
    urlpatterns.append(path("my_async_view", views.my_async_view, name="my_async_view"))

if views.simple_async_view is not None:
    urlpatterns.append(
        path("simple_async_view", views.simple_async_view, name="simple_async_view")
    )

if views.thread_ids_async is not None:
    urlpatterns.append(
        path("async/thread_ids", views.thread_ids_async, name="thread_ids_async")
    )

if views.post_echo_async is not None:
    urlpatterns.append(
        path("post_echo_async", views.post_echo_async, name="post_echo_async")
    )

# rest framework
try:
    urlpatterns.append(
        path("rest-framework-exc", views.rest_framework_exc, name="rest_framework_exc")
    )
    urlpatterns.append(
        path(
            "rest-framework-read-body-and-exc",
            views.rest_framework_read_body_and_exc,
            name="rest_framework_read_body_and_exc",
        )
    )
    urlpatterns.append(path("rest-hello", views.rest_hello, name="rest_hello"))
    urlpatterns.append(
        path("rest-json-response", views.rest_json_response, name="rest_json_response")
    )
    urlpatterns.append(
        path(
            "rest-permission-denied-exc",
            views.rest_permission_denied_exc,
            name="rest_permission_denied_exc",
        )
    )
except AttributeError:
    pass

handler500 = views.handler500
handler404 = views.handler404

sentry-python-2.18.0/tests/integrations/django/myapp/views.py
import asyncio
import json
import threading

from django.contrib.auth import login
from django.contrib.auth.models import User
from django.core.exceptions import PermissionDenied
from django.dispatch import Signal
from django.http import HttpResponse, HttpResponseNotFound, HttpResponseServerError
from django.shortcuts import render
from django.template import Context, Template
from django.template.response import TemplateResponse
from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_page
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import ListView


from tests.integrations.django.myapp.signals import (
    myapp_custom_signal,
    myapp_custom_signal_silenced,
)

try:
    from rest_framework.decorators import api_view
    from rest_framework.response import Response

    @api_view(["POST"])
    def rest_framework_exc(request):
        1 / 0

    @api_view(["POST"])
    def rest_framework_read_body_and_exc(request):
        request.data
        1 / 0

    @api_view(["GET"])
    def rest_hello(request):
        return HttpResponse("ok")

    @api_view(["GET"])
    def rest_permission_denied_exc(request):
        raise PermissionDenied("bye")

    @api_view(["GET"])
    def rest_json_response(request):
        return Response(dict(ok=True))

except ImportError:
    pass


import sentry_sdk
from sentry_sdk import capture_message


@csrf_exempt
def view_exc(request):
    1 / 0


@csrf_exempt
def view_exc_with_msg(request):
    capture_message("oops")
    1 / 0


@cache_page(60)
def cached_view(request):
    return HttpResponse("ok")


def not_cached_view(request):
    return HttpResponse("ok")


def view_with_cached_template_fragment(request):
    template = Template(
        """{% load cache %}
        Not cached content goes here.
        {% cache 500 some_identifier %}
            And here some cached content.
        {% endcache %}
        """
    )
    rendered = template.render(Context({}))
    return HttpResponse(rendered)


# This is a "class based view" as previously found in the sentry codebase. The
# interesting property of this one is that csrf_exempt, as a class attribute,
# is not in __dict__, so regular use of functools.wraps will not forward the
# attribute.
class SentryClassBasedView:
    csrf_exempt = True

    def __call__(self, request):
        return HttpResponse("ok")


class SentryClassBasedViewWithCsrf:
    def __call__(self, request):
        return HttpResponse("ok")


@csrf_exempt
def read_body_and_view_exc(request):
    request.read()
    1 / 0


@csrf_exempt
def message(request):
    sentry_sdk.capture_message("hi")
    return HttpResponse("ok")


@csrf_exempt
def nomessage(request):
    return HttpResponse("ok")


@csrf_exempt
def view_with_signal(request):
    custom_signal = Signal()
    custom_signal.send(sender="hello")
    return HttpResponse("ok")


@csrf_exempt
def mylogin(request):
    user = User.objects.create_user("john", "lennon@thebeatles.com", "johnpassword")
    user.backend = "django.contrib.auth.backends.ModelBackend"
    login(request, user)
    return HttpResponse("ok")


@csrf_exempt
def handler500(request):
    return HttpResponseServerError("Sentry error.")


class ClassBasedView(ListView):
    model = None

    @method_decorator(csrf_exempt)
    def dispatch(self, request, *args, **kwargs):
        return super().dispatch(request, *args, **kwargs)

    def head(self, *args, **kwargs):
        sentry_sdk.capture_message("hi")
        return HttpResponse("")

    def post(self, *args, **kwargs):
        return HttpResponse("ok")


@csrf_exempt
def post_echo(request):
    sentry_sdk.capture_message("hi")
    return HttpResponse(request.body)


@csrf_exempt
def handler404(*args, **kwargs):
    sentry_sdk.capture_message("not found", level="error")
    return HttpResponseNotFound("404")


@csrf_exempt
def template_exc(request, *args, **kwargs):
    return render(request, "error.html")


@csrf_exempt
def template_test(request, *args, **kwargs):
    return render(request, "user_name.html", {"user_age": 20})


@csrf_exempt
def custom_ok(request, *args, **kwargs):
    return HttpResponse("custom ok")


@csrf_exempt
def custom_exc(request, *args, **kwargs):
    1 / 0


@csrf_exempt
def template_test2(request, *args, **kwargs):
    return TemplateResponse(
        request, ("user_name.html", "another_template.html"), {"user_age": 25}
    )


@csrf_exempt
def template_test3(request, *args, **kwargs):
    traceparent = sentry_sdk.get_current_scope().get_traceparent()
    if traceparent is None:
        traceparent = sentry_sdk.get_isolation_scope().get_traceparent()

    baggage = sentry_sdk.get_current_scope().get_baggage()
    if baggage is None:
        baggage = sentry_sdk.get_isolation_scope().get_baggage()

    capture_message(traceparent + "\n" + baggage.serialize())
    return render(request, "trace_meta.html", {})
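
# template_test3 reports the scope's traceparent and serialized baggage via
# capture_message and then renders trace_meta.html ({{ sentry_trace_meta }}),
# so a test can compare the rendered <meta> tags against the captured values.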


@csrf_exempt
def postgres_select(request, *args, **kwargs):
    from django.db import connections

    cursor = connections["postgres"].cursor()
    cursor.execute("SELECT 1;")
    return HttpResponse("ok")


@csrf_exempt
def postgres_select_orm(request, *args, **kwargs):
    user = User.objects.using("postgres").all().first()
    return HttpResponse("ok {}".format(user))


@csrf_exempt
def permission_denied_exc(*args, **kwargs):
    raise PermissionDenied("bye")


def csrf_hello_not_exempt(*args, **kwargs):
    return HttpResponse("ok")


def thread_ids_sync(*args, **kwargs):
    response = json.dumps(
        {
            "main": threading.main_thread().ident,
            "active": threading.current_thread().ident,
        }
    )
    return HttpResponse(response)


async def async_message(request):
    sentry_sdk.capture_message("hi")
    return HttpResponse("ok")


async def my_async_view(request):
    await asyncio.sleep(1)
    return HttpResponse("Hello World")


async def simple_async_view(request):
    return HttpResponse("Simple Hello World")


async def thread_ids_async(request):
    response = json.dumps(
        {
            "main": threading.main_thread().ident,
            "active": threading.current_thread().ident,
        }
    )
    return HttpResponse(response)


async def post_echo_async(request):
    sentry_sdk.capture_message("hi")
    return HttpResponse(request.body)


post_echo_async.csrf_exempt = True


@csrf_exempt
def send_myapp_custom_signal(request):
    myapp_custom_signal.send(sender="hello")
    myapp_custom_signal_silenced.send(sender="hello")
    return HttpResponse("ok")

sentry-python-2.18.0/tests/integrations/django/myapp/wsgi.py
"""
WSGI config for myapp project.

It exposes the WSGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/
"""

import os

from django.core.wsgi import get_wsgi_application

os.environ.setdefault(
    "DJANGO_SETTINGS_MODULE", "tests.integrations.django.myapp.settings"
)

application = get_wsgi_application()

sentry-python-2.18.0/tests/integrations/django/test_basic.py
import inspect
import json
import os
import re
import sys
import pytest
from functools import partial
from unittest.mock import patch

from werkzeug.test import Client

from django import VERSION as DJANGO_VERSION
from django.contrib.auth.models import User
from django.core.management import execute_from_command_line
from django.db.utils import OperationalError, ProgrammingError, DataError
from django.http.request import RawPostDataException
from django.utils.functional import SimpleLazyObject

try:
    from django.urls import reverse
except ImportError:
    from django.core.urlresolvers import reverse

import sentry_sdk
from sentry_sdk._compat import PY310
from sentry_sdk import capture_message, capture_exception
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.django import (
    DjangoIntegration,
    DjangoRequestExtractor,
    _set_db_data,
)
from sentry_sdk.integrations.django.signals_handlers import _get_receiver_name
from sentry_sdk.integrations.executing import ExecutingIntegration
from sentry_sdk.profiler.utils import get_frame_name
from sentry_sdk.tracing import Span
from tests.conftest import unpack_werkzeug_response
from tests.integrations.django.myapp.wsgi import application
from tests.integrations.django.myapp.signals import myapp_custom_signal_silenced
from tests.integrations.django.utils import pytest_mark_django_db_decorator

DJANGO_VERSION = DJANGO_VERSION[:2]


@pytest.fixture
def client():
    return Client(application)
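
# The fixture drives requests through the full WSGI stack via werkzeug's test
# Client, so the Sentry-instrumented WSGIHandler wraps every request the tests
# below make.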


def test_view_exceptions(sentry_init, client, capture_exceptions, capture_events):
    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
    exceptions = capture_exceptions()
    events = capture_events()
    client.get(reverse("view_exc"))

    (error,) = exceptions
    assert isinstance(error, ZeroDivisionError)

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "django"


def test_ensures_x_forwarded_header_is_honored_in_sdk_when_enabled_in_django(
    sentry_init, client, capture_exceptions, capture_events, settings
):
    """
    Test that ensures that if django settings.USE_X_FORWARDED_HOST is set to True,
    the SDK builds the request URL from the `X-Forwarded-Host` header.
    """
    settings.USE_X_FORWARDED_HOST = True

    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
    exceptions = capture_exceptions()
    events = capture_events()
    client.get(reverse("view_exc"), headers={"X_FORWARDED_HOST": "example.com"})

    (error,) = exceptions
    assert isinstance(error, ZeroDivisionError)

    (event,) = events
    assert event["request"]["url"] == "http://example.com/view-exc"


def test_ensures_x_forwarded_header_is_not_honored_when_unenabled_in_django(
    sentry_init, client, capture_exceptions, capture_events
):
    """
    Test that ensures that if django settings.USE_X_FORWARDED_HOST is set to False,
    the SDK builds the request URL from the standard `Host` header.
    """
    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
    exceptions = capture_exceptions()
    events = capture_events()
    client.get(reverse("view_exc"), headers={"X_FORWARDED_HOST": "example.com"})

    (error,) = exceptions
    assert isinstance(error, ZeroDivisionError)

    (event,) = events
    assert event["request"]["url"] == "http://localhost/view-exc"
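
# Background for the two tests above: Django's HttpRequest.get_host() honors
# the X-Forwarded-Host header only when settings.USE_X_FORWARDED_HOST is True,
# which is why the captured URL flips between example.com and localhost.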


def test_middleware_exceptions(sentry_init, client, capture_exceptions):
    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
    exceptions = capture_exceptions()
    client.get(reverse("middleware_exc"))

    (error,) = exceptions
    assert isinstance(error, ZeroDivisionError)


def test_request_captured(sentry_init, client, capture_events):
    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
    events = capture_events()
    content, status, headers = unpack_werkzeug_response(client.get(reverse("message")))

    assert content == b"ok"

    (event,) = events
    assert event["transaction"] == "/message"
    assert event["request"] == {
        "cookies": {},
        "env": {"SERVER_NAME": "localhost", "SERVER_PORT": "80"},
        "headers": {"Host": "localhost"},
        "method": "GET",
        "query_string": "",
        "url": "http://localhost/message",
    }


def test_transaction_with_class_view(sentry_init, client, capture_events):
    sentry_init(
        integrations=[DjangoIntegration(transaction_style="function_name")],
        send_default_pii=True,
    )
    events = capture_events()
    content, status, headers = unpack_werkzeug_response(
        client.head(reverse("classbased"))
    )
    assert status.lower() == "200 ok"

    (event,) = events

    assert (
        event["transaction"] == "tests.integrations.django.myapp.views.ClassBasedView"
    )
    assert event["message"] == "hi"


def test_has_trace_if_performance_enabled(sentry_init, client, capture_events):
    sentry_init(
        integrations=[
            DjangoIntegration(
                http_methods_to_capture=("HEAD",),
            )
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()
    client.head(reverse("view_exc_with_msg"))

    (msg_event, error_event, transaction_event) = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert transaction_event["contexts"]["trace"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
        == transaction_event["contexts"]["trace"]["trace_id"]
    )


def test_has_trace_if_performance_disabled(sentry_init, client, capture_events):
    sentry_init(
        integrations=[DjangoIntegration()],
    )
    events = capture_events()
    client.head(reverse("view_exc_with_msg"))

    (msg_event, error_event) = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
    )


def test_trace_from_headers_if_performance_enabled(sentry_init, client, capture_events):
    sentry_init(
        integrations=[
            DjangoIntegration(
                http_methods_to_capture=("HEAD",),
            )
        ],
        traces_sample_rate=1.0,
    )

    events = capture_events()

    trace_id = "582b43a4192642f0b136d5159a501701"
    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)

    client.head(
        reverse("view_exc_with_msg"), headers={"sentry-trace": sentry_trace_header}
    )

    (msg_event, error_event, transaction_event) = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert transaction_event["contexts"]["trace"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
    assert transaction_event["contexts"]["trace"]["trace_id"] == trace_id


def test_trace_from_headers_if_performance_disabled(
    sentry_init, client, capture_events
):
    sentry_init(
        integrations=[
            DjangoIntegration(
                http_methods_to_capture=("HEAD",),
            )
        ],
    )

    events = capture_events()

    trace_id = "582b43a4192642f0b136d5159a501701"
    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)

    client.head(
        reverse("view_exc_with_msg"), headers={"sentry-trace": sentry_trace_header}
    )

    (msg_event, error_event) = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
    assert error_event["contexts"]["trace"]["trace_id"] == trace_id


@pytest.mark.forked
@pytest_mark_django_db_decorator()
def test_user_captured(sentry_init, client, capture_events):
    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
    events = capture_events()
    content, status, headers = unpack_werkzeug_response(client.get(reverse("mylogin")))
    assert content == b"ok"

    assert not events

    content, status, headers = unpack_werkzeug_response(client.get(reverse("message")))
    assert content == b"ok"

    (event,) = events

    assert event["user"] == {
        "email": "lennon@thebeatles.com",
        "username": "john",
        "id": "1",
    }


@pytest.mark.forked
@pytest_mark_django_db_decorator()
def test_queryset_repr(sentry_init, capture_events):
    sentry_init(integrations=[DjangoIntegration()])
    events = capture_events()
    User.objects.create_user("john", "lennon@thebeatles.com", "johnpassword")

    try:
        my_queryset = User.objects.all()  # noqa
        1 / 0
    except Exception:
        capture_exception()

    (event,) = events

    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    (frame,) = exception["stacktrace"]["frames"]
    assert frame["vars"]["my_queryset"].startswith(
        "<QuerySet from django.db.models.query at 0x"
    )


@pytest.mark.forked
@pytest_mark_django_db_decorator()
def test_template_tracing_meta(sentry_init, client, capture_events):
    sentry_init(integrations=[DjangoIntegration()])
    events = capture_events()

    content, _, _ = unpack_werkzeug_response(client.get(reverse("template_test3")))
    rendered_meta = content.decode("utf-8")

    traceparent, baggage = events[0]["message"].split("\n")
    assert traceparent != ""
    assert baggage != ""

    match = re.match(
        r'^<meta name="sentry-trace" content="([^\"]*)">\n<meta name="baggage" content="([^\"]*)">\n',
        rendered_meta,
    )
    assert match is not None
    assert match.group(1) == traceparent

    rendered_baggage = match.group(2)
    assert rendered_baggage == baggage


@pytest.mark.parametrize("with_executing_integration", [[], [ExecutingIntegration()]])
def test_template_exception(
    sentry_init, client, capture_events, with_executing_integration
):
    sentry_init(integrations=[DjangoIntegration()] + with_executing_integration)
    events = capture_events()

    content, status, headers = unpack_werkzeug_response(
        client.get(reverse("template_exc"))
    )
    assert status.lower() == "500 internal server error"

    (event,) = events
    exception = event["exception"]["values"][-1]
    assert exception["type"] == "TemplateSyntaxError"

    frames = [
        f
        for f in exception["stacktrace"]["frames"]
        if not f["filename"].startswith("django/")
    ]
    view_frame, template_frame = frames[-2:]

    assert template_frame["context_line"] == "{% invalid template tag %}\n"
    assert template_frame["pre_context"] == ["5\n", "6\n", "7\n", "8\n", "9\n"]

    assert template_frame["post_context"] == ["11\n", "12\n", "13\n", "14\n", "15\n"]
    assert template_frame["lineno"] == 10
    assert template_frame["filename"].endswith("error.html")

    filenames = [
        (f.get("function"), f.get("module")) for f in exception["stacktrace"]["frames"]
    ]

    if with_executing_integration:
        assert filenames[-3:] == [
            ("Parser.parse", "django.template.base"),
            (None, None),
            ("Parser.invalid_block_tag", "django.template.base"),
        ]
    else:
        assert filenames[-3:] == [
            ("parse", "django.template.base"),
            (None, None),
            ("invalid_block_tag", "django.template.base"),
        ]


@pytest.mark.parametrize(
    "route", ["rest_framework_exc", "rest_framework_read_body_and_exc"]
)
@pytest.mark.parametrize(
    "ct,body",
    [
        ["application/json", {"foo": "bar"}],
        ["application/json", 1],
        ["application/json", "foo"],
        ["application/x-www-form-urlencoded", {"foo": "bar"}],
    ],
)
def test_rest_framework_basic(
    sentry_init, client, capture_events, capture_exceptions, ct, body, route
):
    pytest.importorskip("rest_framework")
    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
    exceptions = capture_exceptions()
    events = capture_events()

    if ct == "application/json":
        client.post(
            reverse(route), data=json.dumps(body), content_type="application/json"
        )
    elif ct == "application/x-www-form-urlencoded":
        client.post(reverse(route), data=body)
    else:
        raise AssertionError("unreachable")

    (error,) = exceptions
    assert isinstance(error, ZeroDivisionError)

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "django"

    assert event["request"]["data"] == body
    assert event["request"]["headers"]["Content-Type"] == ct


@pytest.mark.parametrize(
    "endpoint", ["rest_permission_denied_exc", "permission_denied_exc"]
)
def test_does_not_capture_403(sentry_init, client, capture_events, endpoint):
    if endpoint == "rest_permission_denied_exc":
        pytest.importorskip("rest_framework")

    sentry_init(integrations=[DjangoIntegration()])
    events = capture_events()

    _, status, _ = unpack_werkzeug_response(client.get(reverse(endpoint)))
    assert status.lower() == "403 forbidden"

    assert not events


def test_render_spans(sentry_init, client, capture_events, render_span_tree):
    sentry_init(
        integrations=[DjangoIntegration()],
        traces_sample_rate=1.0,
    )
    views_tests = [
        (
            reverse("template_test2"),
            '- op="template.render": description="[user_name.html, ...]"',
        ),
    ]
    if DJANGO_VERSION >= (1, 7):
        views_tests.append(
            (
                reverse("template_test"),
                '- op="template.render": description="user_name.html"',
            ),
        )

    for url, expected_line in views_tests:
        events = capture_events()
        client.get(url)
        transaction = events[0]
        assert expected_line in render_span_tree(transaction)


if DJANGO_VERSION >= (1, 10):
    EXPECTED_MIDDLEWARE_SPANS = """\
- op="http.server": description=null
  - op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.__call__"
    - op="middleware.django": description="django.contrib.auth.middleware.AuthenticationMiddleware.__call__"
      - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.__call__"
        - op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.__call__"
          - op="middleware.django": description="tests.integrations.django.myapp.settings.TestFunctionMiddleware.__call__"
            - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
            - op="view.render": description="message"\
"""
else:
    EXPECTED_MIDDLEWARE_SPANS = """\
- op="http.server": description=null
  - op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.process_request"
  - op="middleware.django": description="django.contrib.auth.middleware.AuthenticationMiddleware.process_request"
  - op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.process_request"
  - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
  - op="view.render": description="message"
  - op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.process_response"
  - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_response"
  - op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.process_response"\
"""

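# The two expected trees differ because new-style middleware (Django >= 1.10)
# nests the whole chain in __call__ spans, while old-style middleware exposes
# flat process_request/process_view/process_response hooks.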

def test_middleware_spans(sentry_init, client, capture_events, render_span_tree):
    sentry_init(
        integrations=[
            DjangoIntegration(signals_spans=False),
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("message"))

    message, transaction = events

    assert message["message"] == "hi"
    assert render_span_tree(transaction) == EXPECTED_MIDDLEWARE_SPANS


def test_middleware_spans_disabled(sentry_init, client, capture_events):
    sentry_init(
        integrations=[
            DjangoIntegration(middleware_spans=False, signals_spans=False),
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("message"))

    message, transaction = events

    assert message["message"] == "hi"
    assert not len(transaction["spans"])


EXPECTED_SIGNALS_SPANS = """\
- op="http.server": description=null
  - op="event.django": description="django.db.reset_queries"
  - op="event.django": description="django.db.close_old_connections"\
"""


def test_signals_spans(sentry_init, client, capture_events, render_span_tree):
    sentry_init(
        integrations=[
            DjangoIntegration(middleware_spans=False),
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("message"))

    message, transaction = events

    assert message["message"] == "hi"
    assert render_span_tree(transaction) == EXPECTED_SIGNALS_SPANS

    assert transaction["spans"][0]["op"] == "event.django"
    assert transaction["spans"][0]["description"] == "django.db.reset_queries"

    assert transaction["spans"][1]["op"] == "event.django"
    assert transaction["spans"][1]["description"] == "django.db.close_old_connections"


def test_signals_spans_disabled(sentry_init, client, capture_events):
    sentry_init(
        integrations=[
            DjangoIntegration(middleware_spans=False, signals_spans=False),
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("message"))

    message, transaction = events

    assert message["message"] == "hi"
    assert not transaction["spans"]


EXPECTED_SIGNALS_SPANS_FILTERED = """\
- op="http.server": description=null
  - op="event.django": description="django.db.reset_queries"
  - op="event.django": description="django.db.close_old_connections"
  - op="event.django": description="tests.integrations.django.myapp.signals.signal_handler"\
"""


def test_signals_spans_filtering(sentry_init, client, capture_events, render_span_tree):
    sentry_init(
        integrations=[
            DjangoIntegration(
                middleware_spans=False,
                signals_denylist=[
                    myapp_custom_signal_silenced,
                ],
            ),
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("send_myapp_custom_signal"))

    (transaction,) = events

    assert render_span_tree(transaction) == EXPECTED_SIGNALS_SPANS_FILTERED

    assert transaction["spans"][0]["op"] == "event.django"
    assert transaction["spans"][0]["description"] == "django.db.reset_queries"

    assert transaction["spans"][1]["op"] == "event.django"
    assert transaction["spans"][1]["description"] == "django.db.close_old_connections"

    assert transaction["spans"][2]["op"] == "event.django"
    assert (
        transaction["spans"][2]["description"]
        == "tests.integrations.django.myapp.signals.signal_handler"
    )
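

# A hedged usage sketch for `signals_denylist`: it takes the signal objects
# themselves, and receivers of those signals then emit no spans. The signal
# below is illustrative and not part of this test suite.
def _example_signals_denylist_config():
    import django.dispatch

    my_noisy_signal = django.dispatch.Signal()
    return DjangoIntegration(
        middleware_spans=False,
        signals_denylist=[my_noisy_signal],
    )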


def test_csrf(sentry_init, client):
    """
    Assert that CSRF view decorator works even with the view wrapped in our own
    callable.
    """

    sentry_init(integrations=[DjangoIntegration()])

    content, status, _headers = unpack_werkzeug_response(
        client.post(reverse("csrf_hello_not_exempt"))
    )
    assert status.lower() == "403 forbidden"

    content, status, _headers = unpack_werkzeug_response(
        client.post(reverse("sentryclass_csrf"))
    )
    assert status.lower() == "403 forbidden"

    content, status, _headers = unpack_werkzeug_response(
        client.post(reverse("sentryclass"))
    )
    assert status.lower() == "200 ok"
    assert content == b"ok"

    content, status, _headers = unpack_werkzeug_response(
        client.post(reverse("classbased"))
    )
    assert status.lower() == "200 ok"
    assert content == b"ok"

    content, status, _headers = unpack_werkzeug_response(
        client.post(reverse("message"))
    )
    assert status.lower() == "200 ok"
    assert content == b"ok"


@pytest.mark.skipif(DJANGO_VERSION < (2, 0), reason="Requires Django > 2.0")
def test_custom_urlconf_middleware(
    settings, sentry_init, client, capture_events, render_span_tree
):
    """
    Some middlewares (for instance in django-tenants) overwrite request.urlconf.
    Test that the resolver picks up the correct urlconf for transaction naming.
    """
    urlconf = "tests.integrations.django.myapp.middleware.custom_urlconf_middleware"
    settings.ROOT_URLCONF = ""
    settings.MIDDLEWARE.insert(0, urlconf)
    client.application.load_middleware()

    sentry_init(integrations=[DjangoIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    content, status, _headers = unpack_werkzeug_response(client.get("/custom/ok"))
    assert status.lower() == "200 ok"
    assert content == b"custom ok"

    event = events.pop(0)
    assert event["transaction"] == "/custom/ok"
    assert "custom_urlconf_middleware" in render_span_tree(event)

    _content, status, _headers = unpack_werkzeug_response(client.get("/custom/exc"))
    assert status.lower() == "500 internal server error"

    error_event, transaction_event = events
    assert error_event["transaction"] == "/custom/exc"
    assert error_event["exception"]["values"][-1]["mechanism"]["type"] == "django"
    assert transaction_event["transaction"] == "/custom/exc"
    assert "custom_urlconf_middleware" in render_span_tree(transaction_event)

    settings.MIDDLEWARE.pop(0)
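

# A minimal sketch (illustrative, not the myapp middleware used above) of a
# middleware that swaps request.urlconf per request, the way django-tenants
# does; the urlconf module path below is an assumption for demonstration:
def _example_custom_urlconf_middleware(get_response):
    def handler(request):
        request.urlconf = "tests.integrations.django.myapp.custom_urls"
        return get_response(request)

    return handler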


def test_get_receiver_name():
    def dummy(a, b):
        return a + b

    name = _get_receiver_name(dummy)

    assert (
        name
        == "tests.integrations.django.test_basic.test_get_receiver_name..dummy"
    )

    a_partial = partial(dummy)
    name = _get_receiver_name(a_partial)
    if PY310:
        assert name == "functools.partial(<function " + a_partial.func.__name__ + ">)"
    else:
        assert name == "partial(<function " + a_partial.func.__name__ + ">)"
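

# Sketch of the CPython naming behavior `_get_receiver_name` relies on (this
# demonstrates stdlib semantics, not the SDK's internals): nested functions
# carry "<locals>" in __qualname__, and partials expose the wrapped callable.
def _example_receiver_naming_semantics():
    from functools import partial as _partial

    def dummy():
        pass

    assert "<locals>" in dummy.__qualname__
    assert _partial(dummy).func is dummy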


@pytest.mark.skipif(DJANGO_VERSION <= (1, 11), reason="Requires Django > 1.11")
def test_span_origin(sentry_init, client, capture_events):
    sentry_init(
        integrations=[
            DjangoIntegration(
                middleware_spans=True,
                signals_spans=True,
                cache_spans=True,
            )
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("view_with_signal"))

    (transaction,) = events

    assert transaction["contexts"]["trace"]["origin"] == "auto.http.django"

    signal_span_found = False
    for span in transaction["spans"]:
        assert span["origin"] == "auto.http.django"
        if span["op"] == "event.django":
            signal_span_found = True

    assert signal_span_found


def test_transaction_http_method_default(sentry_init, client, capture_events):
    """
    By default OPTIONS and HEAD requests do not create a transaction.
    """
    sentry_init(
        integrations=[DjangoIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get("/nomessage")
    client.options("/nomessage")
    client.head("/nomessage")

    (event,) = events

    assert len(events) == 1
    assert event["request"]["method"] == "GET"


def test_transaction_http_method_custom(sentry_init, client, capture_events):
    sentry_init(
        integrations=[
            DjangoIntegration(
                http_methods_to_capture=(
                    "OPTIONS",
                    "head",
                ),  # capitalization does not matter
            )
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get("/nomessage")
    client.options("/nomessage")
    client.head("/nomessage")

    assert len(events) == 2

    (event1, event2) = events
    assert event1["request"]["method"] == "OPTIONS"
    assert event2["request"]["method"] == "HEAD"


def test_ensures_spotlight_middleware_when_spotlight_is_enabled(sentry_init, settings):
    """
    Test that ensures that, if Spotlight is enabled, the relevant
    SpotlightMiddleware is added to the middleware list in settings.
    """
    settings.DEBUG = True
    original_middleware = frozenset(settings.MIDDLEWARE)

    sentry_init(integrations=[DjangoIntegration()], spotlight=True)

    added = frozenset(settings.MIDDLEWARE) ^ original_middleware

    assert "sentry_sdk.spotlight.SpotlightMiddleware" in added


def test_ensures_no_spotlight_middleware_when_env_killswitch_is_false(
    monkeypatch, sentry_init, settings
):
    """
    Test that ensures that the relevant SpotlightMiddleware is NOT added to the
    middleware list in settings if Spotlight is enabled but the
    SENTRY_SPOTLIGHT_ON_ERROR environment variable is set to a falsy value.
    """
    settings.DEBUG = True
    monkeypatch.setenv("SENTRY_SPOTLIGHT_ON_ERROR", "no")

    original_middleware = frozenset(settings.MIDDLEWARE)

    sentry_init(integrations=[DjangoIntegration()], spotlight=True)

    added = frozenset(settings.MIDDLEWARE) ^ original_middleware

    assert "sentry_sdk.spotlight.SpotlightMiddleware" not in added


def test_ensures_no_spotlight_middleware_when_no_spotlight(
    monkeypatch, sentry_init, settings
):
    """
    Test that ensures that, if Spotlight is not enabled, the relevant
    SpotlightMiddleware is NOT added to the middleware list in settings.
    """
    settings.DEBUG = True

    # We should NOT have the middleware even if the env var is truthy if Spotlight is off
    monkeypatch.setenv("SENTRY_SPOTLIGHT_ON_ERROR", "1")

    original_middleware = frozenset(settings.MIDDLEWARE)

    sentry_init(integrations=[DjangoIntegration()], spotlight=False)

    added = frozenset(settings.MIDDLEWARE) ^ original_middleware

    assert "sentry_sdk.spotlight.SpotlightMiddleware" not in added


def test_get_frame_name_when_in_lazy_object():
    allowed_to_init = False

    class SimpleLazyObjectWrapper(SimpleLazyObject):
        def unproxied_method(self):
            """
            For testing purposes. We inject a method on this SimpleLazyObject
            subclass so that, if Python is executing this method, we get this
            class instead of the wrapped class and avoid evaluating the
            wrapped object too early.
            """
            return inspect.currentframe()

    class GetFrame:
        def __init__(self):
            assert allowed_to_init, "GetFrame not permitted to initialize yet"

        def proxied_method(self):
            """
            For testing purposes. We add a proxied method on the wrapped
            class so that, if Python is executing this method, we get this
            class instead of the wrapper class.
            """
            return inspect.currentframe()

    instance = SimpleLazyObjectWrapper(lambda: GetFrame())

    assert get_frame_name(instance.unproxied_method()) == (
        "SimpleLazyObjectWrapper.unproxied_method"
        if sys.version_info < (3, 11)
        else "test_get_frame_name_when_in_lazy_object..SimpleLazyObjectWrapper.unproxied_method"
    )

    # Now that we're about to access an instance method on the wrapped class,
    # we should permit initializing it
    allowed_to_init = True

    assert get_frame_name(instance.proxied_method()) == (
        "GetFrame.proxied_method"
        if sys.version_info < (3, 11)
        else "test_get_frame_name_when_in_lazy_object..GetFrame.proxied_method"
    )
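

# SimpleLazyObject defers calling its factory until an attribute of the
# wrapped object is accessed; the test above relies on this to detect
# premature evaluation. A minimal demonstration:
def _example_simple_lazy_object_defers_evaluation():
    calls = []
    lazy = SimpleLazyObject(lambda: calls.append(1) or "value")
    assert calls == []  # the factory has not run yet
    assert lazy.upper() == "VALUE"  # first attribute access triggers it
    assert calls == [1]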
sentry-python-2.18.0/tests/integrations/django/test_cache_module.py000066400000000000000000000502061471214654000256070ustar00rootroot00000000000000import os
import random
import uuid

import pytest
from django import VERSION as DJANGO_VERSION
from werkzeug.test import Client

try:
    from django.urls import reverse
except ImportError:
    from django.core.urlresolvers import reverse

import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration
from sentry_sdk.integrations.django.caching import _get_span_description
from tests.integrations.django.myapp.wsgi import application
from tests.integrations.django.utils import pytest_mark_django_db_decorator


DJANGO_VERSION = DJANGO_VERSION[:2]


@pytest.fixture
def client():
    return Client(application)


@pytest.fixture
def use_django_caching(settings):
    settings.CACHES = {
        "default": {
            "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
            "LOCATION": "unique-snowflake-%s" % random.randint(1, 1000000),
        }
    }


@pytest.fixture
def use_django_caching_with_middlewares(settings):
    settings.CACHES = {
        "default": {
            "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
            "LOCATION": "unique-snowflake-%s" % random.randint(1, 1000000),
        }
    }
    if hasattr(settings, "MIDDLEWARE"):
        middleware = settings.MIDDLEWARE
    elif hasattr(settings, "MIDDLEWARE_CLASSES"):
        middleware = settings.MIDDLEWARE_CLASSES
    else:
        middleware = None

    if middleware is not None:
        middleware.insert(0, "django.middleware.cache.UpdateCacheMiddleware")
        middleware.append("django.middleware.cache.FetchFromCacheMiddleware")


@pytest.fixture
def use_django_caching_with_port(settings):
    settings.CACHES = {
        "default": {
            "BACKEND": "django.core.cache.backends.dummy.DummyCache",
            "LOCATION": "redis://username:password@127.0.0.1:6379",
        }
    }


@pytest.fixture
def use_django_caching_without_port(settings):
    settings.CACHES = {
        "default": {
            "BACKEND": "django.core.cache.backends.dummy.DummyCache",
            "LOCATION": "redis://example.com",
        }
    }


@pytest.fixture
def use_django_caching_with_cluster(settings):
    settings.CACHES = {
        "default": {
            "BACKEND": "django.core.cache.backends.dummy.DummyCache",
            "LOCATION": [
                "redis://127.0.0.1:6379",
                "redis://127.0.0.2:6378",
                "redis://127.0.0.3:6377",
            ],
        }
    }
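

# Illustrative approximation of how a cache LOCATION string maps onto the
# network.peer.* span data asserted in the tests below (not the SDK's actual
# parser): credentials are dropped, and the port is reported separately.
def _example_location_to_peer_data(location="redis://username:password@127.0.0.1:6379"):
    from urllib.parse import urlparse

    parsed = urlparse(location)
    address = "%s://%s" % (parsed.scheme, parsed.hostname)
    return address, parsed.port  # ("redis://127.0.0.1", 6379)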


@pytest.mark.forked
@pytest_mark_django_db_decorator()
@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
def test_cache_spans_disabled_middleware(
    sentry_init, client, capture_events, use_django_caching_with_middlewares
):
    sentry_init(
        integrations=[
            DjangoIntegration(
                cache_spans=False,
                middleware_spans=False,
                signals_spans=False,
            )
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("not_cached_view"))
    client.get(reverse("not_cached_view"))

    (first_event, second_event) = events
    assert len(first_event["spans"]) == 0
    assert len(second_event["spans"]) == 0


@pytest.mark.forked
@pytest_mark_django_db_decorator()
@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
def test_cache_spans_disabled_decorator(
    sentry_init, client, capture_events, use_django_caching
):
    sentry_init(
        integrations=[
            DjangoIntegration(
                cache_spans=False,
                middleware_spans=False,
                signals_spans=False,
            )
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("cached_view"))
    client.get(reverse("cached_view"))

    (first_event, second_event) = events
    assert len(first_event["spans"]) == 0
    assert len(second_event["spans"]) == 0


@pytest.mark.forked
@pytest_mark_django_db_decorator()
@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
def test_cache_spans_disabled_templatetag(
    sentry_init, client, capture_events, use_django_caching
):
    sentry_init(
        integrations=[
            DjangoIntegration(
                cache_spans=False,
                middleware_spans=False,
                signals_spans=False,
            )
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("view_with_cached_template_fragment"))
    client.get(reverse("view_with_cached_template_fragment"))

    (first_event, second_event) = events
    assert len(first_event["spans"]) == 0
    assert len(second_event["spans"]) == 0


@pytest.mark.forked
@pytest_mark_django_db_decorator()
@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
def test_cache_spans_middleware(
    sentry_init, client, capture_events, use_django_caching_with_middlewares
):
    sentry_init(
        integrations=[
            DjangoIntegration(
                cache_spans=True,
                middleware_spans=False,
                signals_spans=False,
            )
        ],
        traces_sample_rate=1.0,
    )

    client.application.load_middleware()
    events = capture_events()

    client.get(reverse("not_cached_view"))
    client.get(reverse("not_cached_view"))

    (first_event, second_event) = events
    # first_event - cache.get
    assert first_event["spans"][0]["op"] == "cache.get"
    assert first_event["spans"][0]["description"].startswith(
        "views.decorators.cache.cache_header."
    )
    assert first_event["spans"][0]["data"]["network.peer.address"] is not None
    assert first_event["spans"][0]["data"]["cache.key"][0].startswith(
        "views.decorators.cache.cache_header."
    )
    assert not first_event["spans"][0]["data"]["cache.hit"]
    assert "cache.item_size" not in first_event["spans"][0]["data"]
    # first_event - cache.put
    assert first_event["spans"][1]["op"] == "cache.put"
    assert first_event["spans"][1]["description"].startswith(
        "views.decorators.cache.cache_header."
    )
    assert first_event["spans"][1]["data"]["network.peer.address"] is not None
    assert first_event["spans"][1]["data"]["cache.key"][0].startswith(
        "views.decorators.cache.cache_header."
    )
    assert "cache.hit" not in first_event["spans"][1]["data"]
    assert first_event["spans"][1]["data"]["cache.item_size"] == 2
    # second_event - cache.get
    assert second_event["spans"][0]["op"] == "cache.get"
    assert second_event["spans"][0]["description"].startswith(
        "views.decorators.cache.cache_header."
    )
    assert second_event["spans"][0]["data"]["network.peer.address"] is not None
    assert second_event["spans"][0]["data"]["cache.key"][0].startswith(
        "views.decorators.cache.cache_header."
    )
    assert not second_event["spans"][0]["data"]["cache.hit"]
    assert "cache.item_size" not in second_event["spans"][0]["data"]
    # second_event - cache.get 2
    assert second_event["spans"][1]["op"] == "cache.get"
    assert second_event["spans"][1]["description"].startswith(
        "views.decorators.cache.cache_page."
    )
    assert second_event["spans"][1]["data"]["network.peer.address"] is not None
    assert second_event["spans"][1]["data"]["cache.key"][0].startswith(
        "views.decorators.cache.cache_page."
    )
    assert second_event["spans"][1]["data"]["cache.hit"]
    assert second_event["spans"][1]["data"]["cache.item_size"] == 58


@pytest.mark.forked
@pytest_mark_django_db_decorator()
@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
def test_cache_spans_decorator(sentry_init, client, capture_events, use_django_caching):
    sentry_init(
        integrations=[
            DjangoIntegration(
                cache_spans=True,
                middleware_spans=False,
                signals_spans=False,
            )
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("cached_view"))
    client.get(reverse("cached_view"))

    (first_event, second_event) = events
    # first_event - cache.get
    assert first_event["spans"][0]["op"] == "cache.get"
    assert first_event["spans"][0]["description"].startswith(
        "views.decorators.cache.cache_header."
    )
    assert first_event["spans"][0]["data"]["network.peer.address"] is not None
    assert first_event["spans"][0]["data"]["cache.key"][0].startswith(
        "views.decorators.cache.cache_header."
    )
    assert not first_event["spans"][0]["data"]["cache.hit"]
    assert "cache.item_size" not in first_event["spans"][0]["data"]
    # first_event - cache.put
    assert first_event["spans"][1]["op"] == "cache.put"
    assert first_event["spans"][1]["description"].startswith(
        "views.decorators.cache.cache_header."
    )
    assert first_event["spans"][1]["data"]["network.peer.address"] is not None
    assert first_event["spans"][1]["data"]["cache.key"][0].startswith(
        "views.decorators.cache.cache_header."
    )
    assert "cache.hit" not in first_event["spans"][1]["data"]
    assert first_event["spans"][1]["data"]["cache.item_size"] == 2
    # second_event - cache.get
    assert second_event["spans"][1]["op"] == "cache.get"
    assert second_event["spans"][1]["description"].startswith(
        "views.decorators.cache.cache_page."
    )
    assert second_event["spans"][1]["data"]["network.peer.address"] is not None
    assert second_event["spans"][1]["data"]["cache.key"][0].startswith(
        "views.decorators.cache.cache_page."
    )
    assert second_event["spans"][1]["data"]["cache.hit"]
    assert second_event["spans"][1]["data"]["cache.item_size"] == 58


@pytest.mark.forked
@pytest_mark_django_db_decorator()
@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
def test_cache_spans_templatetag(
    sentry_init, client, capture_events, use_django_caching
):
    sentry_init(
        integrations=[
            DjangoIntegration(
                cache_spans=True,
                middleware_spans=False,
                signals_spans=False,
            )
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("view_with_cached_template_fragment"))
    client.get(reverse("view_with_cached_template_fragment"))

    (first_event, second_event) = events
    assert len(first_event["spans"]) == 2
    # first_event - cache.get
    assert first_event["spans"][0]["op"] == "cache.get"
    assert first_event["spans"][0]["description"].startswith(
        "template.cache.some_identifier."
    )
    assert first_event["spans"][0]["data"]["network.peer.address"] is not None
    assert first_event["spans"][0]["data"]["cache.key"][0].startswith(
        "template.cache.some_identifier."
    )
    assert not first_event["spans"][0]["data"]["cache.hit"]
    assert "cache.item_size" not in first_event["spans"][0]["data"]
    # first_event - cache.put
    assert first_event["spans"][1]["op"] == "cache.put"
    assert first_event["spans"][1]["description"].startswith(
        "template.cache.some_identifier."
    )
    assert first_event["spans"][1]["data"]["network.peer.address"] is not None
    assert first_event["spans"][1]["data"]["cache.key"][0].startswith(
        "template.cache.some_identifier."
    )
    assert "cache.hit" not in first_event["spans"][1]["data"]
    assert first_event["spans"][1]["data"]["cache.item_size"] == 51
    # second_event - cache.get
    assert second_event["spans"][0]["op"] == "cache.get"
    assert second_event["spans"][0]["description"].startswith(
        "template.cache.some_identifier."
    )
    assert second_event["spans"][0]["data"]["network.peer.address"] is not None
    assert second_event["spans"][0]["data"]["cache.key"][0].startswith(
        "template.cache.some_identifier."
    )
    assert second_event["spans"][0]["data"]["cache.hit"]
    assert second_event["spans"][0]["data"]["cache.item_size"] == 51


@pytest.mark.parametrize(
    "method_name, args, kwargs, expected_description",
    [
        (None, None, None, ""),
        ("get", None, None, ""),
        ("get", [], {}, ""),
        ("get", ["bla", "blub", "foo"], {}, "bla"),
        ("get", [uuid.uuid4().bytes], {}, ""),
        (
            "get_many",
            [["bla1", "bla2", "bla3"], "blub", "foo"],
            {},
            "bla1, bla2, bla3",
        ),
        (
            "get_many",
            [["bla:1", "bla:2", "bla:3"], "blub", "foo"],
            {"key": "bar"},
            "bla:1, bla:2, bla:3",
        ),
        ("get", [], {"key": "bar"}, "bar"),
        (
            "get",
            "something",
            {},
            "s",
        ),  # this case should never happen, just making sure that we are not raising an exception in that case.
    ],
)
def test_cache_spans_get_span_description(
    method_name, args, kwargs, expected_description
):
    assert _get_span_description(method_name, args, kwargs) == expected_description
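

# An illustrative re-implementation consistent with the table above (the SDK's
# actual _get_span_description may differ in detail): the description comes
# from the first positional argument or the "key" kwarg, is joined with ", "
# for *_many operations, and is empty for non-string keys.
def _example_span_description(method_name, args, kwargs):
    key = (args and args[0]) or (kwargs or {}).get("key") or ""
    if isinstance(key, (list, tuple)):
        return ", ".join(key)
    return key if isinstance(key, str) else ""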


@pytest.mark.forked
@pytest_mark_django_db_decorator()
def test_cache_spans_location_with_port(
    sentry_init, client, capture_events, use_django_caching_with_port
):
    sentry_init(
        integrations=[
            DjangoIntegration(
                cache_spans=True,
                middleware_spans=False,
                signals_spans=False,
            )
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("cached_view"))
    client.get(reverse("cached_view"))

    for event in events:
        for span in event["spans"]:
            assert (
                span["data"]["network.peer.address"] == "redis://127.0.0.1"
            )  # Note: the username/password are not included in the address
            assert span["data"]["network.peer.port"] == 6379


@pytest.mark.forked
@pytest_mark_django_db_decorator()
def test_cache_spans_location_without_port(
    sentry_init, client, capture_events, use_django_caching_without_port
):
    sentry_init(
        integrations=[
            DjangoIntegration(
                cache_spans=True,
                middleware_spans=False,
                signals_spans=False,
            )
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("cached_view"))
    client.get(reverse("cached_view"))

    for event in events:
        for span in event["spans"]:
            assert span["data"]["network.peer.address"] == "redis://example.com"
            assert "network.peer.port" not in span["data"]


@pytest.mark.forked
@pytest_mark_django_db_decorator()
def test_cache_spans_location_with_cluster(
    sentry_init, client, capture_events, use_django_caching_with_cluster
):
    sentry_init(
        integrations=[
            DjangoIntegration(
                cache_spans=True,
                middleware_spans=False,
                signals_spans=False,
            )
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("cached_view"))
    client.get(reverse("cached_view"))

    for event in events:
        for span in event["spans"]:
            # because it is a cluster we do not know what host is actually accessed, so we omit the data
            assert "network.peer.address" not in span["data"].keys()
            assert "network.peer.port" not in span["data"].keys()


@pytest.mark.forked
@pytest_mark_django_db_decorator()
def test_cache_spans_item_size(sentry_init, client, capture_events, use_django_caching):
    sentry_init(
        integrations=[
            DjangoIntegration(
                cache_spans=True,
                middleware_spans=False,
                signals_spans=False,
            )
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("cached_view"))
    client.get(reverse("cached_view"))

    (first_event, second_event) = events
    assert len(first_event["spans"]) == 3
    assert first_event["spans"][0]["op"] == "cache.get"
    assert not first_event["spans"][0]["data"]["cache.hit"]
    assert "cache.item_size" not in first_event["spans"][0]["data"]

    assert first_event["spans"][1]["op"] == "cache.put"
    assert "cache.hit" not in first_event["spans"][1]["data"]
    assert first_event["spans"][1]["data"]["cache.item_size"] == 2

    assert first_event["spans"][2]["op"] == "cache.put"
    assert "cache.hit" not in first_event["spans"][2]["data"]
    assert first_event["spans"][2]["data"]["cache.item_size"] == 58

    assert len(second_event["spans"]) == 2
    assert second_event["spans"][0]["op"] == "cache.get"
    assert not second_event["spans"][0]["data"]["cache.hit"]
    assert "cache.item_size" not in second_event["spans"][0]["data"]

    assert second_event["spans"][1]["op"] == "cache.get"
    assert second_event["spans"][1]["data"]["cache.hit"]
    assert second_event["spans"][1]["data"]["cache.item_size"] == 58


@pytest.mark.forked
@pytest_mark_django_db_decorator()
def test_cache_spans_get_many(sentry_init, capture_events, use_django_caching):
    sentry_init(
        integrations=[
            DjangoIntegration(
                cache_spans=True,
                middleware_spans=False,
                signals_spans=False,
            )
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    id = os.getpid()

    from django.core.cache import cache

    with sentry_sdk.start_transaction():
        cache.get_many([f"S{id}", f"S{id+1}"])
        cache.set(f"S{id}", "Sensitive1")
        cache.get_many([f"S{id}", f"S{id+1}"])

    (transaction,) = events
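    # 7 spans: each get_many() emits an aggregate cache.get span plus one
    # cache.get span per key (LocMemCache falls back to per-key get()), and
    # the set() emits a single cache.put span: 3 + 1 + 3 = 7.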
    assert len(transaction["spans"]) == 7

    assert transaction["spans"][0]["op"] == "cache.get"
    assert transaction["spans"][0]["description"] == f"S{id}, S{id+1}"

    assert transaction["spans"][1]["op"] == "cache.get"
    assert transaction["spans"][1]["description"] == f"S{id}"

    assert transaction["spans"][2]["op"] == "cache.get"
    assert transaction["spans"][2]["description"] == f"S{id+1}"

    assert transaction["spans"][3]["op"] == "cache.put"
    assert transaction["spans"][3]["description"] == f"S{id}"

    assert transaction["spans"][4]["op"] == "cache.get"
    assert transaction["spans"][4]["description"] == f"S{id}, S{id+1}"

    assert transaction["spans"][5]["op"] == "cache.get"
    assert transaction["spans"][5]["description"] == f"S{id}"

    assert transaction["spans"][6]["op"] == "cache.get"
    assert transaction["spans"][6]["description"] == f"S{id+1}"


@pytest.mark.forked
@pytest_mark_django_db_decorator()
def test_cache_spans_set_many(sentry_init, capture_events, use_django_caching):
    sentry_init(
        integrations=[
            DjangoIntegration(
                cache_spans=True,
                middleware_spans=False,
                signals_spans=False,
            )
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    id = os.getpid()

    from django.core.cache import cache

    with sentry_sdk.start_transaction():
        cache.set_many({f"S{id}": "Sensitive1", f"S{id+1}": "Sensitive2"})
        cache.get(f"S{id}")

    (transaction,) = events
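    # 4 spans: set_many() emits an aggregate cache.put span plus one cache.put
    # span per key, and the get() emits a single cache.get span: 3 + 1 = 4.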
    assert len(transaction["spans"]) == 4

    assert transaction["spans"][0]["op"] == "cache.put"
    assert transaction["spans"][0]["description"] == f"S{id}, S{id+1}"

    assert transaction["spans"][1]["op"] == "cache.put"
    assert transaction["spans"][1]["description"] == f"S{id}"

    assert transaction["spans"][2]["op"] == "cache.put"
    assert transaction["spans"][2]["description"] == f"S{id+1}"

    assert transaction["spans"][3]["op"] == "cache.get"
    assert transaction["spans"][3]["description"] == f"S{id}"


@pytest.mark.forked
@pytest_mark_django_db_decorator()
@pytest.mark.skipif(DJANGO_VERSION <= (1, 11), reason="Requires Django > 1.11")
def test_span_origin_cache(sentry_init, client, capture_events, use_django_caching):
    sentry_init(
        integrations=[
            DjangoIntegration(
                middleware_spans=True,
                signals_spans=True,
                cache_spans=True,
            )
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client.get(reverse("cached_view"))

    (transaction,) = events

    assert transaction["contexts"]["trace"]["origin"] == "auto.http.django"

    cache_span_found = False
    for span in transaction["spans"]:
        assert span["origin"] == "auto.http.django"
        if span["op"].startswith("cache."):
            cache_span_found = True

    assert cache_span_found
sentry-python-2.18.0/tests/integrations/django/test_data_scrubbing.py000066400000000000000000000046101471214654000261440ustar00rootroot00000000000000import pytest

from werkzeug.test import Client

from sentry_sdk.integrations.django import DjangoIntegration
from tests.conftest import werkzeug_set_cookie
from tests.integrations.django.myapp.wsgi import application
from tests.integrations.django.utils import pytest_mark_django_db_decorator

try:
    from django.urls import reverse
except ImportError:
    from django.core.urlresolvers import reverse


@pytest.fixture
def client():
    return Client(application)


@pytest.mark.forked
@pytest_mark_django_db_decorator()
def test_scrub_django_session_cookies_removed(
    sentry_init,
    client,
    capture_events,
):
    sentry_init(integrations=[DjangoIntegration()], send_default_pii=False)
    events = capture_events()
    werkzeug_set_cookie(client, "localhost", "sessionid", "123")
    werkzeug_set_cookie(client, "localhost", "csrftoken", "456")
    werkzeug_set_cookie(client, "localhost", "foo", "bar")
    client.get(reverse("view_exc"))

    (event,) = events
    assert "cookies" not in event["request"]


@pytest.mark.forked
@pytest_mark_django_db_decorator()
def test_scrub_django_session_cookies_filtered(
    sentry_init,
    client,
    capture_events,
):
    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
    events = capture_events()
    werkzeug_set_cookie(client, "localhost", "sessionid", "123")
    werkzeug_set_cookie(client, "localhost", "csrftoken", "456")
    werkzeug_set_cookie(client, "localhost", "foo", "bar")
    client.get(reverse("view_exc"))

    (event,) = events
    assert event["request"]["cookies"] == {
        "sessionid": "[Filtered]",
        "csrftoken": "[Filtered]",
        "foo": "bar",
    }


@pytest.mark.forked
@pytest_mark_django_db_decorator()
def test_scrub_django_custom_session_cookies_filtered(
    sentry_init,
    client,
    capture_events,
    settings,
):
    settings.SESSION_COOKIE_NAME = "my_sess"
    settings.CSRF_COOKIE_NAME = "csrf_secret"

    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
    events = capture_events()
    werkzeug_set_cookie(client, "localhost", "my_sess", "123")
    werkzeug_set_cookie(client, "localhost", "csrf_secret", "456")
    werkzeug_set_cookie(client, "localhost", "foo", "bar")
    client.get(reverse("view_exc"))

    (event,) = events
    assert event["request"]["cookies"] == {
        "my_sess": "[Filtered]",
        "csrf_secret": "[Filtered]",
        "foo": "bar",
    }
sentry-python-2.18.0/tests/integrations/django/test_db_query_data.py000066400000000000000000000407341471214654000260070ustar00rootroot00000000000000import os

import pytest
from datetime import datetime
from unittest import mock

from django import VERSION as DJANGO_VERSION
from django.db import connections

try:
    from django.urls import reverse
except ImportError:
    from django.core.urlresolvers import reverse

from werkzeug.test import Client

from sentry_sdk import start_transaction
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.django import DjangoIntegration
from sentry_sdk.tracing_utils import record_sql_queries

from tests.conftest import unpack_werkzeug_response
from tests.integrations.django.utils import pytest_mark_django_db_decorator
from tests.integrations.django.myapp.wsgi import application


@pytest.fixture
def client():
    return Client(application)


@pytest.mark.forked
@pytest_mark_django_db_decorator(transaction=True)
def test_query_source_disabled(sentry_init, client, capture_events):
    sentry_options = {
        "integrations": [DjangoIntegration()],
        "send_default_pii": True,
        "traces_sample_rate": 1.0,
        "enable_db_query_source": False,
        "db_query_source_threshold_ms": 0,
    }

    sentry_init(**sentry_options)

    if "postgres" not in connections:
        pytest.skip("postgres tests disabled")

    # trigger Django to open a new connection by marking the existing one as None.
    connections["postgres"].connection = None

    events = capture_events()

    _, status, _ = unpack_werkzeug_response(client.get(reverse("postgres_select_orm")))
    assert status == "200 OK"

    (event,) = events
    for span in event["spans"]:
        if span.get("op") == "db" and "auth_user" in span.get("description"):
            data = span.get("data", {})

            assert SPANDATA.CODE_LINENO not in data
            assert SPANDATA.CODE_NAMESPACE not in data
            assert SPANDATA.CODE_FILEPATH not in data
            assert SPANDATA.CODE_FUNCTION not in data
            break
    else:
        raise AssertionError("No db span found")


@pytest.mark.forked
@pytest_mark_django_db_decorator(transaction=True)
@pytest.mark.parametrize("enable_db_query_source", [None, True])
def test_query_source_enabled(
    sentry_init, client, capture_events, enable_db_query_source
):
    sentry_options = {
        "integrations": [DjangoIntegration()],
        "send_default_pii": True,
        "traces_sample_rate": 1.0,
        "db_query_source_threshold_ms": 0,
    }

    if enable_db_query_source is not None:
        sentry_options["enable_db_query_source"] = enable_db_query_source

    sentry_init(**sentry_options)

    if "postgres" not in connections:
        pytest.skip("postgres tests disabled")

    # trigger Django to open a new connection by marking the existing one as None.
    connections["postgres"].connection = None

    events = capture_events()

    _, status, _ = unpack_werkzeug_response(client.get(reverse("postgres_select_orm")))
    assert status == "200 OK"

    (event,) = events
    for span in event["spans"]:
        if span.get("op") == "db" and "auth_user" in span.get("description"):
            data = span.get("data", {})

            assert SPANDATA.CODE_LINENO in data
            assert SPANDATA.CODE_NAMESPACE in data
            assert SPANDATA.CODE_FILEPATH in data
            assert SPANDATA.CODE_FUNCTION in data

            break
    else:
        raise AssertionError("No db span found")


@pytest.mark.forked
@pytest_mark_django_db_decorator(transaction=True)
def test_query_source(sentry_init, client, capture_events):
    sentry_init(
        integrations=[DjangoIntegration()],
        send_default_pii=True,
        traces_sample_rate=1.0,
        enable_db_query_source=True,
        db_query_source_threshold_ms=0,
    )

    if "postgres" not in connections:
        pytest.skip("postgres tests disabled")

    # trigger Django to open a new connection by marking the existing one as None.
    connections["postgres"].connection = None

    events = capture_events()

    _, status, _ = unpack_werkzeug_response(client.get(reverse("postgres_select_orm")))
    assert status == "200 OK"

    (event,) = events
    for span in event["spans"]:
        if span.get("op") == "db" and "auth_user" in span.get("description"):
            data = span.get("data", {})

            assert SPANDATA.CODE_LINENO in data
            assert SPANDATA.CODE_NAMESPACE in data
            assert SPANDATA.CODE_FILEPATH in data
            assert SPANDATA.CODE_FUNCTION in data

            assert type(data.get(SPANDATA.CODE_LINENO)) == int
            assert data.get(SPANDATA.CODE_LINENO) > 0

            assert (
                data.get(SPANDATA.CODE_NAMESPACE)
                == "tests.integrations.django.myapp.views"
            )
            assert data.get(SPANDATA.CODE_FILEPATH).endswith(
                "tests/integrations/django/myapp/views.py"
            )

            is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
            assert is_relative_path

            assert data.get(SPANDATA.CODE_FUNCTION) == "postgres_select_orm"

            break
    else:
        raise AssertionError("No db span found")


@pytest.mark.forked
@pytest_mark_django_db_decorator(transaction=True)
def test_query_source_with_module_in_search_path(sentry_init, client, capture_events):
    """
    Test that query source is relative to the path of the module it ran in
    """
    client = Client(application)

    sentry_init(
        integrations=[DjangoIntegration()],
        send_default_pii=True,
        traces_sample_rate=1.0,
        enable_db_query_source=True,
        db_query_source_threshold_ms=0,
    )

    if "postgres" not in connections:
        pytest.skip("postgres tests disabled")

    # trigger Django to open a new connection by marking the existing one as None.
    connections["postgres"].connection = None

    events = capture_events()

    _, status, _ = unpack_werkzeug_response(
        client.get(reverse("postgres_select_slow_from_supplement"))
    )
    assert status == "200 OK"

    (event,) = events
    for span in event["spans"]:
        if span.get("op") == "db" and "auth_user" in span.get("description"):
            data = span.get("data", {})

            assert SPANDATA.CODE_LINENO in data
            assert SPANDATA.CODE_NAMESPACE in data
            assert SPANDATA.CODE_FILEPATH in data
            assert SPANDATA.CODE_FUNCTION in data

            assert type(data.get(SPANDATA.CODE_LINENO)) == int
            assert data.get(SPANDATA.CODE_LINENO) > 0
            assert data.get(SPANDATA.CODE_NAMESPACE) == "django_helpers.views"
            assert data.get(SPANDATA.CODE_FILEPATH) == "django_helpers/views.py"

            is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
            assert is_relative_path

            assert data.get(SPANDATA.CODE_FUNCTION) == "postgres_select_orm"

            break
    else:
        raise AssertionError("No db span found")


@pytest.mark.forked
@pytest_mark_django_db_decorator(transaction=True)
def test_query_source_with_in_app_exclude(sentry_init, client, capture_events):
    sentry_init(
        integrations=[DjangoIntegration()],
        send_default_pii=True,
        traces_sample_rate=1.0,
        enable_db_query_source=True,
        db_query_source_threshold_ms=0,
        in_app_exclude=["tests.integrations.django.myapp.views"],
    )

    if "postgres" not in connections:
        pytest.skip("postgres tests disabled")

    # trigger Django to open a new connection by marking the existing one as None.
    connections["postgres"].connection = None

    events = capture_events()

    _, status, _ = unpack_werkzeug_response(client.get(reverse("postgres_select_orm")))
    assert status == "200 OK"

    (event,) = events
    for span in event["spans"]:
        if span.get("op") == "db" and "auth_user" in span.get("description"):
            data = span.get("data", {})

            assert SPANDATA.CODE_LINENO in data
            assert SPANDATA.CODE_NAMESPACE in data
            assert SPANDATA.CODE_FILEPATH in data
            assert SPANDATA.CODE_FUNCTION in data

            assert type(data.get(SPANDATA.CODE_LINENO)) == int
            assert data.get(SPANDATA.CODE_LINENO) > 0

            if DJANGO_VERSION >= (1, 11):
                assert (
                    data.get(SPANDATA.CODE_NAMESPACE)
                    == "tests.integrations.django.myapp.settings"
                )
                assert data.get(SPANDATA.CODE_FILEPATH).endswith(
                    "tests/integrations/django/myapp/settings.py"
                )
                assert data.get(SPANDATA.CODE_FUNCTION) == "middleware"
            else:
                assert (
                    data.get(SPANDATA.CODE_NAMESPACE)
                    == "tests.integrations.django.test_db_query_data"
                )
                assert data.get(SPANDATA.CODE_FILEPATH).endswith(
                    "tests/integrations/django/test_db_query_data.py"
                )
                assert (
                    data.get(SPANDATA.CODE_FUNCTION)
                    == "test_query_source_with_in_app_exclude"
                )

            break
    else:
        raise AssertionError("No db span found")


@pytest.mark.forked
@pytest_mark_django_db_decorator(transaction=True)
def test_query_source_with_in_app_include(sentry_init, client, capture_events):
    sentry_init(
        integrations=[DjangoIntegration()],
        send_default_pii=True,
        traces_sample_rate=1.0,
        enable_db_query_source=True,
        db_query_source_threshold_ms=0,
        in_app_include=["django"],
    )

    if "postgres" not in connections:
        pytest.skip("postgres tests disabled")

    # trigger Django to open a new connection by marking the existing one as None.
    connections["postgres"].connection = None

    events = capture_events()

    _, status, _ = unpack_werkzeug_response(client.get(reverse("postgres_select_orm")))
    assert status == "200 OK"

    (event,) = events
    for span in event["spans"]:
        if span.get("op") == "db" and "auth_user" in span.get("description"):
            data = span.get("data", {})

            assert SPANDATA.CODE_LINENO in data
            assert SPANDATA.CODE_NAMESPACE in data
            assert SPANDATA.CODE_FILEPATH in data
            assert SPANDATA.CODE_FUNCTION in data

            assert type(data.get(SPANDATA.CODE_LINENO)) == int
            assert data.get(SPANDATA.CODE_LINENO) > 0

            assert data.get(SPANDATA.CODE_NAMESPACE) == "django.db.models.sql.compiler"
            assert data.get(SPANDATA.CODE_FILEPATH).endswith(
                "django/db/models/sql/compiler.py"
            )
            assert data.get(SPANDATA.CODE_FUNCTION) == "execute_sql"
            break
    else:
        raise AssertionError("No db span found")


@pytest.mark.forked
@pytest_mark_django_db_decorator(transaction=True)
def test_no_query_source_if_duration_too_short(sentry_init, client, capture_events):
    sentry_init(
        integrations=[DjangoIntegration()],
        send_default_pii=True,
        traces_sample_rate=1.0,
        enable_db_query_source=True,
        db_query_source_threshold_ms=100,
    )

    if "postgres" not in connections:
        pytest.skip("postgres tests disabled")

    # trigger Django to open a new connection by marking the existing one as None.
    connections["postgres"].connection = None

    events = capture_events()

    class fake_record_sql_queries:  # noqa: N801
        def __init__(self, *args, **kwargs):
            with record_sql_queries(*args, **kwargs) as span:
                self.span = span

            self.span.start_timestamp = datetime(2024, 1, 1, microsecond=0)
            self.span.timestamp = datetime(2024, 1, 1, microsecond=99999)

        def __enter__(self):
            return self.span

        def __exit__(self, type, value, traceback):
            pass

    with mock.patch(
        "sentry_sdk.integrations.django.record_sql_queries",
        fake_record_sql_queries,
    ):
        _, status, _ = unpack_werkzeug_response(
            client.get(reverse("postgres_select_orm"))
        )

    assert status == "200 OK"

    (event,) = events
    for span in event["spans"]:
        if span.get("op") == "db" and "auth_user" in span.get("description"):
            data = span.get("data", {})

            assert SPANDATA.CODE_LINENO not in data
            assert SPANDATA.CODE_NAMESPACE not in data
            assert SPANDATA.CODE_FILEPATH not in data
            assert SPANDATA.CODE_FUNCTION not in data

            break
    else:
        raise AssertionError("No db span found")


@pytest.mark.forked
@pytest_mark_django_db_decorator(transaction=True)
def test_query_source_if_duration_over_threshold(sentry_init, client, capture_events):
    sentry_init(
        integrations=[DjangoIntegration()],
        send_default_pii=True,
        traces_sample_rate=1.0,
        enable_db_query_source=True,
        db_query_source_threshold_ms=100,
    )

    if "postgres" not in connections:
        pytest.skip("postgres tests disabled")

    # trigger Django to open a new connection by marking the existing one as None.
    connections["postgres"].connection = None

    events = capture_events()

    class fake_record_sql_queries:  # noqa: N801
        def __init__(self, *args, **kwargs):
            with record_sql_queries(*args, **kwargs) as span:
                self.span = span

            self.span.start_timestamp = datetime(2024, 1, 1, microsecond=0)
            self.span.timestamp = datetime(2024, 1, 1, microsecond=101000)

        def __enter__(self):
            return self.span

        def __exit__(self, type, value, traceback):
            pass

    with mock.patch(
        "sentry_sdk.integrations.django.record_sql_queries",
        fake_record_sql_queries,
    ):
        _, status, _ = unpack_werkzeug_response(
            client.get(reverse("postgres_select_orm"))
        )

    assert status == "200 OK"

    (event,) = events
    for span in event["spans"]:
        if span.get("op") == "db" and "auth_user" in span.get("description"):
            data = span.get("data", {})

            assert SPANDATA.CODE_LINENO in data
            assert SPANDATA.CODE_NAMESPACE in data
            assert SPANDATA.CODE_FILEPATH in data
            assert SPANDATA.CODE_FUNCTION in data

            assert type(data.get(SPANDATA.CODE_LINENO)) == int
            assert data.get(SPANDATA.CODE_LINENO) > 0

            assert (
                data.get(SPANDATA.CODE_NAMESPACE)
                == "tests.integrations.django.myapp.views"
            )
            assert data.get(SPANDATA.CODE_FILEPATH).endswith(
                "tests/integrations/django/myapp/views.py"
            )

            is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
            assert is_relative_path

            assert data.get(SPANDATA.CODE_FUNCTION) == "postgres_select_orm"
            break
    else:
        raise AssertionError("No db span found")


@pytest.mark.forked
@pytest_mark_django_db_decorator(transaction=True)
def test_db_span_origin_execute(sentry_init, client, capture_events):
    sentry_init(
        integrations=[DjangoIntegration()],
        traces_sample_rate=1.0,
    )

    if "postgres" not in connections:
        pytest.skip("postgres tests disabled")

    # trigger Django to open a new connection by marking the existing one as None.
    connections["postgres"].connection = None

    events = capture_events()

    client.get(reverse("postgres_select_orm"))

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "auto.http.django"

    for span in event["spans"]:
        if span["op"] == "db":
            assert span["origin"] == "auto.db.django"
        else:
            assert span["origin"] == "auto.http.django"


@pytest.mark.forked
@pytest_mark_django_db_decorator(transaction=True)
def test_db_span_origin_executemany(sentry_init, client, capture_events):
    sentry_init(
        integrations=[DjangoIntegration()],
        traces_sample_rate=1.0,
    )

    events = capture_events()

    if "postgres" not in connections:
        pytest.skip("postgres tests disabled")

    with start_transaction(name="test_transaction"):
        from django.db import connection, transaction

        cursor = connection.cursor()

        query = """UPDATE auth_user SET username = %s where id = %s;"""
        query_list = (
            (
                "test1",
                1,
            ),
            (
                "test2",
                2,
            ),
        )
        cursor.executemany(query, query_list)

        transaction.commit()

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"
    assert event["spans"][0]["origin"] == "auto.db.django"
sentry-python-2.18.0/tests/integrations/django/test_middleware.py000066400000000000000000000020661471214654000253150ustar00rootroot00000000000000from typing import Optional

import pytest

from sentry_sdk.integrations.django.middleware import _wrap_middleware


def _sync_capable_middleware_factory(sync_capable):
    # type: (Optional[bool]) -> type
    """Create a middleware class with a sync_capable attribute set to the value passed to the factory.
    If the factory is called with None, the middleware class will not have a sync_capable attribute.
    """
    sc = sync_capable  # rename so we can set sync_capable in the class

    class TestMiddleware:
        nonlocal sc
        if sc is not None:
            sync_capable = sc

    return TestMiddleware


@pytest.mark.parametrize(
    ("middleware", "sync_capable"),
    (
        (_sync_capable_middleware_factory(True), True),
        (_sync_capable_middleware_factory(False), False),
        (_sync_capable_middleware_factory(None), True),
    ),
)
def test_wrap_middleware_sync_capable_attribute(middleware, sync_capable):
    wrapped_middleware = _wrap_middleware(middleware, "test_middleware")

    assert wrapped_middleware.sync_capable is sync_capable
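

# Django treats middleware without a `sync_capable` attribute as sync-capable,
# which is why the None case above resolves to True. A minimal illustration of
# that default (mirroring Django's convention, not the SDK's wrapper):
def _example_sync_capable_default(middleware_cls):
    return getattr(middleware_cls, "sync_capable", True)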
sentry-python-2.18.0/tests/integrations/django/test_transactions.py000066400000000000000000000116361471214654000257130ustar00rootroot00000000000000from unittest import mock

import pytest
import django
from django.utils.translation import pgettext_lazy


# django<2.0 has only `url` with regex based patterns.
# django>=2.0 renames `url` to `re_path`, and additionally introduces `path`
# for new style URL patterns, e.g. <int:article_id>.
if django.VERSION >= (2, 0):
    from django.urls import path, re_path
    from django.urls.converters import PathConverter
    from django.conf.urls import include
else:
    from django.conf.urls import url as re_path, include

if django.VERSION < (1, 9):
    included_url_conf = (re_path(r"^foo/bar/(?P<param>[\w]+)", lambda x: ""),), "", ""
else:
    included_url_conf = ((re_path(r"^foo/bar/(?P<param>[\w]+)", lambda x: ""),), "")

from sentry_sdk.integrations.django.transactions import RavenResolver


example_url_conf = (
    re_path(r"^api/(?P[\w_-]+)/store/$", lambda x: ""),
    re_path(r"^api/(?P(v1|v2))/author/$", lambda x: ""),
    re_path(
        r"^api/(?P[^\/]+)/product/(?P(?:\d+|[A-Fa-f0-9-]{32,36}))/$",
        lambda x: "",
    ),
    re_path(r"^report/", lambda x: ""),
    re_path(r"^example/", include(included_url_conf)),
)
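

# Sketch of the substitution RavenResolver performs: named regex groups become
# {name} placeholders in the transaction name (illustrative, not the
# resolver's actual implementation).
def _example_named_group_substitution():
    import re

    pattern = r"^api/(?P<project_id>[\w_-]+)/store/$"
    legend = re.sub(r"\(\?P<(\w+)>.+?\)", r"{\1}", pattern).strip("^$")
    assert legend == "api/{project_id}/store/"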


def test_resolver_no_match():
    resolver = RavenResolver()
    result = resolver.resolve("/foo/bar", example_url_conf)
    assert result is None


def test_resolver_re_path_complex_match():
    resolver = RavenResolver()
    result = resolver.resolve("/api/1234/store/", example_url_conf)
    assert result == "/api/{project_id}/store/"


def test_resolver_re_path_complex_either_match():
    resolver = RavenResolver()
    result = resolver.resolve("/api/v1/author/", example_url_conf)
    assert result == "/api/{version}/author/"
    result = resolver.resolve("/api/v2/author/", example_url_conf)
    assert result == "/api/{version}/author/"


def test_resolver_re_path_included_match():
    resolver = RavenResolver()
    result = resolver.resolve("/example/foo/bar/baz", example_url_conf)
    assert result == "/example/foo/bar/{param}"


def test_resolver_re_path_multiple_groups():
    resolver = RavenResolver()
    result = resolver.resolve(
        "/api/myproject/product/cb4ef1caf3554c34ae134f3c1b3d605f/", example_url_conf
    )
    assert result == "/api/{project_id}/product/{pid}/"


@pytest.mark.skipif(
    django.VERSION < (2, 0),
    reason="Django>=2.0 required for  patterns",
)
def test_resolver_path_group():
    url_conf = (path("api/v2//store/", lambda x: ""),)
    resolver = RavenResolver()
    result = resolver.resolve("/api/v2/1234/store/", url_conf)
    assert result == "/api/v2/{project_id}/store/"


@pytest.mark.skipif(
    django.VERSION < (2, 0),
    reason="Django>=2.0 required for  patterns",
)
def test_resolver_path_multiple_groups():
    url_conf = (path("api/v2//product/", lambda x: ""),)
    resolver = RavenResolver()
    result = resolver.resolve("/api/v2/myproject/product/5689", url_conf)
    assert result == "/api/v2/{project_id}/product/{pid}"


@pytest.mark.skipif(
    django.VERSION < (2, 0),
    reason="Django>=2.0 required for  patterns",
)
@pytest.mark.skipif(
    django.VERSION > (5, 1),
    reason="get_converter removed in 5.1",
)
def test_resolver_path_complex_path_legacy():
    class CustomPathConverter(PathConverter):
        regex = r"[^/]+(/[^/]+){0,2}"

    with mock.patch(
        "django.urls.resolvers.get_converter",
        return_value=CustomPathConverter,
    ):
        url_conf = (path("api/v3/", lambda x: ""),)
        resolver = RavenResolver()
        result = resolver.resolve("/api/v3/abc/def/ghi", url_conf)
        assert result == "/api/v3/{my_path}"


@pytest.mark.skipif(
    django.VERSION < (5, 1),
    reason="get_converters is used in 5.1",
)
def test_resolver_path_complex_path():
    class CustomPathConverter(PathConverter):
        regex = r"[^/]+(/[^/]+){0,2}"

    with mock.patch(
        "django.urls.resolvers.get_converters",
        return_value={"custom_path": CustomPathConverter},
    ):
        url_conf = (path("api/v3/", lambda x: ""),)
        resolver = RavenResolver()
        result = resolver.resolve("/api/v3/abc/def/ghi", url_conf)
        assert result == "/api/v3/{my_path}"


@pytest.mark.skipif(
    django.VERSION < (2, 0),
    reason="Django>=2.0 required for  patterns",
)
def test_resolver_path_no_converter():
    url_conf = (path("api/v4/", lambda x: ""),)
    resolver = RavenResolver()
    result = resolver.resolve("/api/v4/myproject", url_conf)
    assert result == "/api/v4/{project_id}"


@pytest.mark.skipif(
    django.VERSION < (2, 0),
    reason="Django>=2.0 required for path patterns",
)
def test_resolver_path_with_i18n():
    url_conf = (path(pgettext_lazy("url", "pgettext"), lambda x: ""),)
    resolver = RavenResolver()
    result = resolver.resolve("/pgettext", url_conf)
    assert result == "/pgettext"
sentry-python-2.18.0/tests/integrations/django/utils.py000066400000000000000000000013311471214654000232730ustar00rootroot00000000000000from functools import partial

import pytest
import pytest_django


# Hack to prevent an experimental feature introduced in `pytest-django` 4.3.0,
# which requires databases to be explicitly allowed, from failing the tests.
pytest_mark_django_db_decorator = partial(pytest.mark.django_db)
try:
    pytest_version = tuple(map(int, pytest_django.__version__.split(".")))
    if pytest_version > (4, 2, 0):
        pytest_mark_django_db_decorator = partial(
            pytest.mark.django_db, databases="__all__"
        )
except ValueError:
    if "dev" in pytest_django.__version__:
        pytest_mark_django_db_decorator = partial(
            pytest.mark.django_db, databases="__all__"
        )
except AttributeError:
    pass
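
# Hypothetical usage sketch (not part of this module): extra kwargs pass
# through to `pytest.mark.django_db`, e.g.
#
#     @pytest_mark_django_db_decorator(transaction=True)
#     def test_something(sentry_init):
#         ...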
sentry-python-2.18.0/tests/integrations/dramatiq/000077500000000000000000000000001471214654000221235ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/dramatiq/__init__.py000066400000000000000000000000571471214654000242360ustar00rootroot00000000000000import pytest

pytest.importorskip("dramatiq")
sentry-python-2.18.0/tests/integrations/dramatiq/test_dramatiq.py000066400000000000000000000134571471214654000253500ustar00rootroot00000000000000import pytest
import uuid

import dramatiq
from dramatiq.brokers.stub import StubBroker

import sentry_sdk
from sentry_sdk.integrations.dramatiq import DramatiqIntegration


@pytest.fixture
def broker(sentry_init):
    sentry_init(integrations=[DramatiqIntegration()])
    broker = StubBroker()
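    # A StubBroker processes messages in-memory (no external broker needed);
    # emitting "process_boot" runs the middleware's after_process_boot hooks,
    # as a real worker process would on startup.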
    broker.emit_after("process_boot")
    dramatiq.set_broker(broker)
    yield broker
    broker.flush_all()
    broker.close()


@pytest.fixture
def worker(broker):
    worker = dramatiq.Worker(broker, worker_timeout=100, worker_threads=1)
    worker.start()
    yield worker
    worker.stop()


def test_that_a_single_error_is_captured(broker, worker, capture_events):
    events = capture_events()

    @dramatiq.actor(max_retries=0)
    def dummy_actor(x, y):
        return x / y

    dummy_actor.send(1, 2)
    dummy_actor.send(1, 0)
    broker.join(dummy_actor.queue_name)
    worker.join()

    (event,) = events
    exception = event["exception"]["values"][0]
    assert exception["type"] == "ZeroDivisionError"


def test_that_actor_name_is_set_as_transaction(broker, worker, capture_events):
    events = capture_events()

    @dramatiq.actor(max_retries=0)
    def dummy_actor(x, y):
        return x / y

    dummy_actor.send(1, 0)
    broker.join(dummy_actor.queue_name)
    worker.join()

    (event,) = events
    assert event["transaction"] == "dummy_actor"


def test_that_dramatiq_message_id_is_set_as_extra(broker, worker, capture_events):
    events = capture_events()

    @dramatiq.actor(max_retries=0)
    def dummy_actor(x, y):
        sentry_sdk.capture_message("hi")
        return x / y

    dummy_actor.send(1, 0)
    broker.join(dummy_actor.queue_name)
    worker.join()

    event_message, event_error = events
    assert "dramatiq_message_id" in event_message["extra"]
    assert "dramatiq_message_id" in event_error["extra"]
    assert (
        event_message["extra"]["dramatiq_message_id"]
        == event_error["extra"]["dramatiq_message_id"]
    )
    msg_ids = [e["extra"]["dramatiq_message_id"] for e in events]
    assert all(uuid.UUID(msg_id) and isinstance(msg_id, str) for msg_id in msg_ids)


def test_that_local_variables_are_captured(broker, worker, capture_events):
    events = capture_events()

    @dramatiq.actor(max_retries=0)
    def dummy_actor(x, y):
        foo = 42  # noqa
        return x / y

    dummy_actor.send(1, 2)
    dummy_actor.send(1, 0)
    broker.join(dummy_actor.queue_name)
    worker.join()

    (event,) = events
    exception = event["exception"]["values"][0]
    assert exception["stacktrace"]["frames"][-1]["vars"] == {
        "x": "1",
        "y": "0",
        "foo": "42",
    }


def test_that_messages_are_captured(broker, worker, capture_events):
    events = capture_events()

    @dramatiq.actor(max_retries=0)
    def dummy_actor():
        sentry_sdk.capture_message("hi")

    dummy_actor.send()
    broker.join(dummy_actor.queue_name)
    worker.join()

    (event,) = events
    assert event["message"] == "hi"
    assert event["level"] == "info"
    assert event["transaction"] == "dummy_actor"


def test_that_sub_actor_errors_are_captured(broker, worker, capture_events):
    events = capture_events()

    @dramatiq.actor(max_retries=0)
    def dummy_actor(x, y):
        sub_actor.send(x, y)

    @dramatiq.actor(max_retries=0)
    def sub_actor(x, y):
        return x / y

    dummy_actor.send(1, 2)
    dummy_actor.send(1, 0)
    broker.join(dummy_actor.queue_name)
    worker.join()

    (event,) = events
    assert event["transaction"] == "sub_actor"

    exception = event["exception"]["values"][0]
    assert exception["type"] == "ZeroDivisionError"


def test_that_multiple_errors_are_captured(broker, worker, capture_events):
    events = capture_events()

    @dramatiq.actor(max_retries=0)
    def dummy_actor(x, y):
        return x / y

    dummy_actor.send(1, 0)
    broker.join(dummy_actor.queue_name)
    worker.join()

    dummy_actor.send(1, None)
    broker.join(dummy_actor.queue_name)
    worker.join()

    event1, event2 = events

    assert event1["transaction"] == "dummy_actor"
    exception = event1["exception"]["values"][0]
    assert exception["type"] == "ZeroDivisionError"

    assert event2["transaction"] == "dummy_actor"
    exception = event2["exception"]["values"][0]
    assert exception["type"] == "TypeError"


def test_that_message_data_is_added_as_request(broker, worker, capture_events):
    events = capture_events()

    @dramatiq.actor(max_retries=0)
    def dummy_actor(x, y):
        return x / y

    dummy_actor.send_with_options(
        args=(
            1,
            0,
        ),
        max_retries=0,
    )
    broker.join(dummy_actor.queue_name)
    worker.join()

    (event,) = events

    assert event["transaction"] == "dummy_actor"
    request_data = event["contexts"]["dramatiq"]["data"]
    assert request_data["queue_name"] == "default"
    assert request_data["actor_name"] == "dummy_actor"
    assert request_data["args"] == [1, 0]
    assert request_data["kwargs"] == {}
    assert request_data["options"]["max_retries"] == 0
    assert uuid.UUID(request_data["message_id"])
    assert isinstance(request_data["message_timestamp"], int)


def test_that_expected_exceptions_are_not_captured(broker, worker, capture_events):
    events = capture_events()

    class ExpectedException(Exception):
        pass

    @dramatiq.actor(max_retries=0, throws=ExpectedException)
    def dummy_actor():
        raise ExpectedException

    dummy_actor.send()
    broker.join(dummy_actor.queue_name)
    worker.join()

    assert events == []


def test_that_retry_exceptions_are_not_captured(broker, worker, capture_events):
    events = capture_events()

    @dramatiq.actor(max_retries=2)
    def dummy_actor():
        raise dramatiq.errors.Retry("Retrying", delay=100)

    dummy_actor.send()
    broker.join(dummy_actor.queue_name)
    worker.join()

    assert events == []
sentry-python-2.18.0/tests/integrations/excepthook/000077500000000000000000000000001471214654000224725ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/excepthook/test_excepthook.py000066400000000000000000000046041471214654000262600ustar00rootroot00000000000000import pytest
import sys
import subprocess

from textwrap import dedent


TEST_PARAMETERS = [("", "HttpTransport")]

if sys.version_info >= (3, 8):
    TEST_PARAMETERS.append(('_experiments={"transport_http2": True}', "Http2Transport"))
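
# Each generated app.py below monkeypatches capture_envelope on the selected
# transport class, so the subprocess prints captured events to stdout instead
# of sending them anywhere.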


@pytest.mark.parametrize("options, transport", TEST_PARAMETERS)
def test_excepthook(tmpdir, options, transport):
    app = tmpdir.join("app.py")
    app.write(
        dedent(
            """
    from sentry_sdk import init, transport

    def capture_envelope(self, envelope):
        print("capture_envelope was called")
        event = envelope.get_event()
        if event is not None:
            print(event)

    transport.{transport}.capture_envelope = capture_envelope

    init("http://foobar@localhost/123", {options})

    frame_value = "LOL"

    1/0
    """.format(
                transport=transport, options=options
            )
        )
    )

    with pytest.raises(subprocess.CalledProcessError) as excinfo:
        subprocess.check_output([sys.executable, str(app)], stderr=subprocess.STDOUT)

    output = excinfo.value.output
    print(output)

    assert b"ZeroDivisionError" in output
    assert b"LOL" in output
    assert b"capture_envelope was called" in output


@pytest.mark.parametrize("options, transport", TEST_PARAMETERS)
def test_always_value_excepthook(tmpdir, options, transport):
    app = tmpdir.join("app.py")
    app.write(
        dedent(
            """
    import sys
    from sentry_sdk import init, transport
    from sentry_sdk.integrations.excepthook import ExcepthookIntegration

    def capture_envelope(self, envelope):
        print("capture_envelope was called")
        event = envelope.get_event()
        if event is not None:
            print(event)

    transport.{transport}.capture_envelope = capture_envelope

    sys.ps1 = "always_value_test"
    init("http://foobar@localhost/123",
        integrations=[ExcepthookIntegration(always_run=True)],
        {options}
    )

    frame_value = "LOL"

    1/0
    """.format(
                transport=transport, options=options
            )
        )
    )

    with pytest.raises(subprocess.CalledProcessError) as excinfo:
        subprocess.check_output([sys.executable, str(app)], stderr=subprocess.STDOUT)

    output = excinfo.value.output
    print(output)

    assert b"ZeroDivisionError" in output
    assert b"LOL" in output
    assert b"capture_envelope was called" in output
sentry-python-2.18.0/tests/integrations/falcon/000077500000000000000000000000001471214654000215635ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/falcon/__init__.py000066400000000000000000000000551471214654000236740ustar00rootroot00000000000000import pytest

pytest.importorskip("falcon")
sentry-python-2.18.0/tests/integrations/falcon/test_falcon.py000066400000000000000000000302451471214654000244420ustar00rootroot00000000000000import logging

import pytest

import falcon
import falcon.testing
import sentry_sdk
from sentry_sdk.integrations.falcon import FalconIntegration
from sentry_sdk.integrations.logging import LoggingIntegration
from sentry_sdk.utils import parse_version


try:
    import falcon.asgi
except ImportError:
    pass
else:
    import falcon.inspect  # We only need this module for the ASGI test


FALCON_VERSION = parse_version(falcon.__version__)


@pytest.fixture
def make_app(sentry_init):
    def inner():
        class MessageResource:
            def on_get(self, req, resp):
                sentry_sdk.capture_message("hi")
                resp.media = "hi"

        class MessageByIdResource:
            def on_get(self, req, resp, message_id):
                sentry_sdk.capture_message("hi")
                resp.media = "hi"

        class CustomError(Exception):
            pass

        class CustomErrorResource:
            def on_get(self, req, resp):
                raise CustomError()

        def custom_error_handler(*args, **kwargs):
            raise falcon.HTTPError(status=falcon.HTTP_400)

        app = falcon.API()
        app.add_route("/message", MessageResource())
        app.add_route("/message/{message_id:int}", MessageByIdResource())
        app.add_route("/custom-error", CustomErrorResource())

        app.add_error_handler(CustomError, custom_error_handler)

        return app

    return inner


@pytest.fixture
def make_client(make_app):
    def inner():
        app = make_app()
        return falcon.testing.TestClient(app)

    return inner


def test_has_context(sentry_init, capture_events, make_client):
    sentry_init(integrations=[FalconIntegration()])
    events = capture_events()

    client = make_client()
    response = client.simulate_get("/message")
    assert response.status == falcon.HTTP_200

    (event,) = events
    assert event["transaction"] == "/message"  # Falcon URI template
    assert "data" not in event["request"]
    assert event["request"]["url"] == "http://falconframework.org/message"


@pytest.mark.parametrize(
    "url,transaction_style,expected_transaction,expected_source",
    [
        ("/message", "uri_template", "/message", "route"),
        ("/message", "path", "/message", "url"),
        ("/message/123456", "uri_template", "/message/{message_id:int}", "route"),
        ("/message/123456", "path", "/message/123456", "url"),
    ],
)
def test_transaction_style(
    sentry_init,
    make_client,
    capture_events,
    url,
    transaction_style,
    expected_transaction,
    expected_source,
):
    integration = FalconIntegration(transaction_style=transaction_style)
    sentry_init(integrations=[integration])
    events = capture_events()

    client = make_client()
    response = client.simulate_get(url)
    assert response.status == falcon.HTTP_200

    (event,) = events
    assert event["transaction"] == expected_transaction
    assert event["transaction_info"] == {"source": expected_source}


def test_unhandled_errors(sentry_init, capture_exceptions, capture_events):
    sentry_init(integrations=[FalconIntegration()])

    class Resource:
        def on_get(self, req, resp):
            1 / 0

    app = falcon.API()
    app.add_route("/", Resource())

    exceptions = capture_exceptions()
    events = capture_events()

    client = falcon.testing.TestClient(app)

    try:
        client.simulate_get("/")
    except ZeroDivisionError:
        pass

    (exc,) = exceptions
    assert isinstance(exc, ZeroDivisionError)

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "falcon"
    assert " by zero" in event["exception"]["values"][0]["value"]


def test_raised_5xx_errors(sentry_init, capture_exceptions, capture_events):
    sentry_init(integrations=[FalconIntegration()])

    class Resource:
        def on_get(self, req, resp):
            raise falcon.HTTPError(falcon.HTTP_502)

    app = falcon.API()
    app.add_route("/", Resource())

    exceptions = capture_exceptions()
    events = capture_events()

    client = falcon.testing.TestClient(app)
    client.simulate_get("/")

    (exc,) = exceptions
    assert isinstance(exc, falcon.HTTPError)

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "falcon"
    assert event["exception"]["values"][0]["type"] == "HTTPError"


def test_raised_4xx_errors(sentry_init, capture_exceptions, capture_events):
    sentry_init(integrations=[FalconIntegration()])

    class Resource:
        def on_get(self, req, resp):
            raise falcon.HTTPError(falcon.HTTP_400)

    app = falcon.API()
    app.add_route("/", Resource())

    exceptions = capture_exceptions()
    events = capture_events()

    client = falcon.testing.TestClient(app)
    client.simulate_get("/")

    assert len(exceptions) == 0
    assert len(events) == 0


def test_http_status(sentry_init, capture_exceptions, capture_events):
    """
    This just demonstrates that if Falcon raises an HTTPStatus with a 5xx
    status code (instead of an HTTPError), Sentry will not capture it.
    """
    sentry_init(integrations=[FalconIntegration()])

    class Resource:
        def on_get(self, req, resp):
            raise falcon.http_status.HTTPStatus(falcon.HTTP_508)

    app = falcon.API()
    app.add_route("/", Resource())

    exceptions = capture_exceptions()
    events = capture_events()

    client = falcon.testing.TestClient(app)
    client.simulate_get("/")

    assert len(exceptions) == 0
    assert len(events) == 0


def test_falcon_large_json_request(sentry_init, capture_events):
    sentry_init(integrations=[FalconIntegration()])

    data = {"foo": {"bar": "a" * 2000}}

    class Resource:
        def on_post(self, req, resp):
            assert req.media == data
            sentry_sdk.capture_message("hi")
            resp.media = "ok"

    app = falcon.API()
    app.add_route("/", Resource())

    events = capture_events()

    client = falcon.testing.TestClient(app)
    response = client.simulate_post("/", json=data)
    assert response.status == falcon.HTTP_200

    (event,) = events
    assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
    }
    assert len(event["request"]["data"]["foo"]["bar"]) == 1024


@pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
def test_falcon_empty_json_request(sentry_init, capture_events, data):
    sentry_init(integrations=[FalconIntegration()])

    class Resource:
        def on_post(self, req, resp):
            assert req.media == data
            sentry_sdk.capture_message("hi")
            resp.media = "ok"

    app = falcon.API()
    app.add_route("/", Resource())

    events = capture_events()

    client = falcon.testing.TestClient(app)
    response = client.simulate_post("/", json=data)
    assert response.status == falcon.HTTP_200

    (event,) = events
    assert event["request"]["data"] == data


def test_falcon_raw_data_request(sentry_init, capture_events):
    sentry_init(integrations=[FalconIntegration()])

    class Resource:
        def on_post(self, req, resp):
            sentry_sdk.capture_message("hi")
            resp.media = "ok"

    app = falcon.API()
    app.add_route("/", Resource())

    events = capture_events()

    client = falcon.testing.TestClient(app)
    response = client.simulate_post("/", body="hi")
    assert response.status == falcon.HTTP_200

    (event,) = events
    assert event["request"]["headers"]["Content-Length"] == "2"
    assert event["request"]["data"] == ""


def test_logging(sentry_init, capture_events):
    sentry_init(
        integrations=[FalconIntegration(), LoggingIntegration(event_level="ERROR")]
    )

    logger = logging.getLogger()

    app = falcon.API()

    class Resource:
        def on_get(self, req, resp):
            logger.error("hi")
            resp.media = "ok"

    app.add_route("/", Resource())

    events = capture_events()

    client = falcon.testing.TestClient(app)
    client.simulate_get("/")

    (event,) = events
    assert event["level"] == "error"


def test_500(sentry_init):
    sentry_init(integrations=[FalconIntegration()])

    app = falcon.API()

    class Resource:
        def on_get(self, req, resp):
            1 / 0

    app.add_route("/", Resource())

    def http500_handler(ex, req, resp, params):
        sentry_sdk.capture_exception(ex)
        resp.media = {"message": "Sentry error."}

    app.add_error_handler(Exception, http500_handler)

    client = falcon.testing.TestClient(app)
    response = client.simulate_get("/")

    assert response.json == {"message": "Sentry error."}


def test_error_in_errorhandler(sentry_init, capture_events):
    sentry_init(integrations=[FalconIntegration()])

    app = falcon.API()

    class Resource:
        def on_get(self, req, resp):
            raise ValueError()

    app.add_route("/", Resource())

    def http500_handler(ex, req, resp, params):
        1 / 0

    app.add_error_handler(Exception, http500_handler)

    events = capture_events()

    client = falcon.testing.TestClient(app)

    with pytest.raises(ZeroDivisionError):
        client.simulate_get("/")

    (event,) = events

    last_ex_values = event["exception"]["values"][-1]
    assert last_ex_values["type"] == "ZeroDivisionError"
    assert last_ex_values["stacktrace"]["frames"][-1]["vars"]["ex"] == "ValueError()"


def test_bad_request_not_captured(sentry_init, capture_events):
    sentry_init(integrations=[FalconIntegration()])
    events = capture_events()

    app = falcon.API()

    class Resource:
        def on_get(self, req, resp):
            raise falcon.HTTPBadRequest()

    app.add_route("/", Resource())

    client = falcon.testing.TestClient(app)

    client.simulate_get("/")

    assert not events


def test_does_not_leak_scope(sentry_init, capture_events):
    sentry_init(integrations=[FalconIntegration()])
    events = capture_events()

    sentry_sdk.get_isolation_scope().set_tag("request_data", False)

    app = falcon.API()

    class Resource:
        def on_get(self, req, resp):
            sentry_sdk.get_isolation_scope().set_tag("request_data", True)

            def generator():
                for row in range(1000):
                    assert sentry_sdk.get_isolation_scope()._tags["request_data"]

                    yield (str(row) + "\n").encode()

            resp.stream = generator()

    app.add_route("/", Resource())

    client = falcon.testing.TestClient(app)
    response = client.simulate_get("/")

    expected_response = "".join(str(row) + "\n" for row in range(1000))
    assert response.text == expected_response
    assert not events
    assert not sentry_sdk.get_isolation_scope()._tags["request_data"]


@pytest.mark.skipif(
    not hasattr(falcon, "asgi"), reason="This Falcon version lacks ASGI support."
)
def test_falcon_not_breaking_asgi(sentry_init):
    """
    This test simply verifies that the Falcon integration does not break ASGI
    Falcon apps.

    The test does not verify ASGI Falcon support, since our Falcon integration
    currently lacks support for ASGI Falcon apps.
    """
    sentry_init(integrations=[FalconIntegration()])

    asgi_app = falcon.asgi.App()

    try:
        falcon.inspect.inspect_app(asgi_app)
    except TypeError:
        pytest.fail("Falcon integration causing errors in ASGI apps.")


@pytest.mark.skipif(
    (FALCON_VERSION or ()) < (3,),
    reason="The Sentry Falcon integration only supports custom error handlers on Falcon 3+",
)
def test_falcon_custom_error_handler(sentry_init, make_app, capture_events):
    """
    When a custom error handler handles what otherwise would have resulted in a 5xx error,
    changing the HTTP status to a non-5xx status, no error event should be sent to Sentry.
    """
    sentry_init(integrations=[FalconIntegration()])
    events = capture_events()

    app = make_app()
    client = falcon.testing.TestClient(app)

    client.simulate_get("/custom-error")

    assert len(events) == 0


def test_span_origin(sentry_init, capture_events, make_client):
    sentry_init(
        integrations=[FalconIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client = make_client()
    client.simulate_get("/message")

    (_, event) = events

    assert event["contexts"]["trace"]["origin"] == "auto.http.falcon"
sentry-python-2.18.0/tests/integrations/fastapi/000077500000000000000000000000001471214654000217505ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/fastapi/__init__.py000066400000000000000000000000561471214654000240620ustar00rootroot00000000000000import pytest

pytest.importorskip("fastapi")
sentry-python-2.18.0/tests/integrations/fastapi/test_fastapi.py000066400000000000000000000450641471214654000250210ustar00rootroot00000000000000import json
import logging
import pytest
import threading
import warnings
from unittest import mock

import fastapi
from fastapi import FastAPI, HTTPException, Request
from fastapi.testclient import TestClient
from fastapi.middleware.trustedhost import TrustedHostMiddleware

from sentry_sdk import capture_message
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
from sentry_sdk.integrations.fastapi import FastApiIntegration
from sentry_sdk.integrations.starlette import StarletteIntegration
from sentry_sdk.utils import parse_version


FASTAPI_VERSION = parse_version(fastapi.__version__)

from tests.integrations.starlette import test_starlette


def fastapi_app_factory():
    app = FastAPI()

    @app.get("/error")
    async def _error():
        capture_message("Hi")
        1 / 0
        return {"message": "Hi"}

    @app.get("/message")
    async def _message():
        capture_message("Hi")
        return {"message": "Hi"}

    @app.delete("/nomessage")
    @app.get("/nomessage")
    @app.head("/nomessage")
    @app.options("/nomessage")
    @app.patch("/nomessage")
    @app.post("/nomessage")
    @app.put("/nomessage")
    @app.trace("/nomessage")
    async def _nomessage():
        return {"message": "nothing here..."}

    @app.get("/message/{message_id}")
    async def _message_with_id(message_id):
        capture_message("Hi")
        return {"message": "Hi"}

    @app.get("/sync/thread_ids")
    def _thread_ids_sync():
        return {
            "main": str(threading.main_thread().ident),
            "active": str(threading.current_thread().ident),
        }

    @app.get("/async/thread_ids")
    async def _thread_ids_async():
        return {
            "main": str(threading.main_thread().ident),
            "active": str(threading.current_thread().ident),
        }

    return app


@pytest.mark.asyncio
async def test_response(sentry_init, capture_events):
    # FastAPI is heavily based on Starlette so we also need
    # to enable StarletteIntegration.
    # In the future this will be auto enabled.
    sentry_init(
        integrations=[StarletteIntegration(), FastApiIntegration()],
        traces_sample_rate=1.0,
        send_default_pii=True,
    )

    app = fastapi_app_factory()

    events = capture_events()

    client = TestClient(app)
    response = client.get("/message")

    assert response.json() == {"message": "Hi"}

    assert len(events) == 2

    (message_event, transaction_event) = events
    assert message_event["message"] == "Hi"
    assert transaction_event["transaction"] == "/message"


@pytest.mark.parametrize(
    "url,transaction_style,expected_transaction,expected_source",
    [
        (
            "/message",
            "url",
            "/message",
            "route",
        ),
        (
            "/message",
            "endpoint",
            "tests.integrations.fastapi.test_fastapi.fastapi_app_factory.._message",
            "component",
        ),
        (
            "/message/123456",
            "url",
            "/message/{message_id}",
            "route",
        ),
        (
            "/message/123456",
            "endpoint",
            "tests.integrations.fastapi.test_fastapi.fastapi_app_factory.._message_with_id",
            "component",
        ),
    ],
)
def test_transaction_style(
    sentry_init,
    capture_events,
    url,
    transaction_style,
    expected_transaction,
    expected_source,
):
    sentry_init(
        integrations=[
            StarletteIntegration(transaction_style=transaction_style),
            FastApiIntegration(transaction_style=transaction_style),
        ],
    )
    app = fastapi_app_factory()

    events = capture_events()

    client = TestClient(app)
    client.get(url)

    (event,) = events
    assert event["transaction"] == expected_transaction
    assert event["transaction_info"] == {"source": expected_source}

    # Assert that state is not leaked
    events.clear()
    capture_message("foo")
    (event,) = events

    assert "request" not in event
    assert "transaction" not in event


def test_legacy_setup(
    sentry_init,
    capture_events,
):
    # Check that behaviour does not change
    # if the user just adds the new Integrations
    # and forgets to remove SentryAsgiMiddleware
    sentry_init()
    app = fastapi_app_factory()
    asgi_app = SentryAsgiMiddleware(app)
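    # The manual SentryAsgiMiddleware wrapper is redundant once the
    # integrations are enabled, but it must not break transaction naming
    # (asserted below).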

    events = capture_events()

    client = TestClient(asgi_app)
    client.get("/message/123456")

    (event,) = events
    assert event["transaction"] == "/message/{message_id}"


@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
@mock.patch("sentry_sdk.profiler.transaction_profiler.PROFILE_MINIMUM_SAMPLES", 0)
def test_active_thread_id(sentry_init, capture_envelopes, teardown_profiling, endpoint):
    sentry_init(
        traces_sample_rate=1.0,
        profiles_sample_rate=1.0,
    )
    app = fastapi_app_factory()
    asgi_app = SentryAsgiMiddleware(app)

    envelopes = capture_envelopes()

    client = TestClient(asgi_app)
    response = client.get(endpoint)
    assert response.status_code == 200

    data = json.loads(response.content)

    envelopes = [envelope for envelope in envelopes]
    assert len(envelopes) == 1

    profiles = [item for item in envelopes[0].items if item.type == "profile"]
    assert len(profiles) == 1

    for item in profiles:
        transactions = item.payload.json["transactions"]
        assert len(transactions) == 1
        assert str(data["active"]) == transactions[0]["active_thread_id"]

    transactions = [item for item in envelopes[0].items if item.type == "transaction"]
    assert len(transactions) == 1

    for item in transactions:
        transaction = item.payload.json
        trace_context = transaction["contexts"]["trace"]
        assert str(data["active"]) == trace_context["data"]["thread.id"]


@pytest.mark.asyncio
async def test_original_request_not_scrubbed(sentry_init, capture_events):
    sentry_init(
        integrations=[StarletteIntegration(), FastApiIntegration()],
        traces_sample_rate=1.0,
    )

    app = FastAPI()

    @app.post("/error")
    async def _error(request: Request):
        logging.critical("Oh no!")
        assert request.headers["Authorization"] == "Bearer ohno"
        assert await request.json() == {"password": "secret"}

        return {"error": "Oh no!"}

    events = capture_events()

    client = TestClient(app)
    client.post(
        "/error", json={"password": "secret"}, headers={"Authorization": "Bearer ohno"}
    )

    event = events[0]
    assert event["request"]["data"] == {"password": "[Filtered]"}
    assert event["request"]["headers"]["authorization"] == "[Filtered]"


@pytest.mark.asyncio
def test_response_status_code_ok_in_transaction_context(sentry_init, capture_envelopes):
    """
    Tests that the response status code is added to the transaction "response" context.
    """
    sentry_init(
        integrations=[StarletteIntegration(), FastApiIntegration()],
        traces_sample_rate=1.0,
        release="demo-release",
    )

    envelopes = capture_envelopes()

    app = fastapi_app_factory()

    client = TestClient(app)
    client.get("/message")

    (_, transaction_envelope) = envelopes
    transaction = transaction_envelope.get_transaction_event()

    assert transaction["type"] == "transaction"
    assert len(transaction["contexts"]) > 0
    assert (
        "response" in transaction["contexts"].keys()
    ), "Response context not found in transaction"
    assert transaction["contexts"]["response"]["status_code"] == 200


@pytest.mark.asyncio
def test_response_status_code_error_in_transaction_context(
    sentry_init,
    capture_envelopes,
):
    """
    Tests that the response status code is added to the transaction "response" context.
    """
    sentry_init(
        integrations=[StarletteIntegration(), FastApiIntegration()],
        traces_sample_rate=1.0,
        release="demo-release",
    )

    envelopes = capture_envelopes()

    app = fastapi_app_factory()

    client = TestClient(app)
    with pytest.raises(ZeroDivisionError):
        client.get("/error")

    (
        _,
        _,
        transaction_envelope,
    ) = envelopes
    transaction = transaction_envelope.get_transaction_event()

    assert transaction["type"] == "transaction"
    assert len(transaction["contexts"]) > 0
    assert (
        "response" in transaction["contexts"].keys()
    ), "Response context not found in transaction"
    assert transaction["contexts"]["response"]["status_code"] == 500


@pytest.mark.asyncio
def test_response_status_code_not_found_in_transaction_context(
    sentry_init,
    capture_envelopes,
):
    """
    Tests that the response status code is added to the transaction "response" context.
    """
    sentry_init(
        integrations=[StarletteIntegration(), FastApiIntegration()],
        traces_sample_rate=1.0,
        release="demo-release",
    )

    envelopes = capture_envelopes()

    app = fastapi_app_factory()

    client = TestClient(app)
    client.get("/non-existing-route-123")

    (transaction_envelope,) = envelopes
    transaction = transaction_envelope.get_transaction_event()

    assert transaction["type"] == "transaction"
    assert len(transaction["contexts"]) > 0
    assert (
        "response" in transaction["contexts"].keys()
    ), "Response context not found in transaction"
    assert transaction["contexts"]["response"]["status_code"] == 404


@pytest.mark.parametrize(
    "request_url,transaction_style,expected_transaction_name,expected_transaction_source",
    [
        (
            "/message/123456",
            "endpoint",
            "tests.integrations.fastapi.test_fastapi.fastapi_app_factory.._message_with_id",
            "component",
        ),
        (
            "/message/123456",
            "url",
            "/message/{message_id}",
            "route",
        ),
    ],
)
def test_transaction_name(
    sentry_init,
    request_url,
    transaction_style,
    expected_transaction_name,
    expected_transaction_source,
    capture_envelopes,
):
    """
    Tests that the transaction name is something meaningful.
    """
    sentry_init(
        auto_enabling_integrations=False,  # Make sure that the httpx integration is not added, because it adds tracing information to the Starlette test client's requests.
        integrations=[
            StarletteIntegration(transaction_style=transaction_style),
            FastApiIntegration(transaction_style=transaction_style),
        ],
        traces_sample_rate=1.0,
    )

    envelopes = capture_envelopes()

    app = fastapi_app_factory()

    client = TestClient(app)
    client.get(request_url)

    (_, transaction_envelope) = envelopes
    transaction_event = transaction_envelope.get_transaction_event()

    assert transaction_event["transaction"] == expected_transaction_name
    assert (
        transaction_event["transaction_info"]["source"] == expected_transaction_source
    )


def test_route_endpoint_equal_dependant_call(sentry_init):
    """
    Tests that the route endpoint name is equal to the wrapped dependant call name.
    """
    sentry_init(
        auto_enabling_integrations=False,  # Make sure that the httpx integration is not added, because it adds tracing information to the Starlette test client's requests.
        integrations=[
            StarletteIntegration(),
            FastApiIntegration(),
        ],
        traces_sample_rate=1.0,
    )

    app = fastapi_app_factory()

    for route in app.router.routes:
        if not hasattr(route, "dependant"):
            continue
        assert route.endpoint.__qualname__ == route.dependant.call.__qualname__


@pytest.mark.parametrize(
    "request_url,transaction_style,expected_transaction_name,expected_transaction_source",
    [
        (
            "/message/123456",
            "endpoint",
            "http://testserver/message/123456",
            "url",
        ),
        (
            "/message/123456",
            "url",
            "http://testserver/message/123456",
            "url",
        ),
    ],
)
def test_transaction_name_in_traces_sampler(
    sentry_init,
    request_url,
    transaction_style,
    expected_transaction_name,
    expected_transaction_source,
):
    """
    Tests that a custom traces_sampler retrieves a meaningful transaction name.
    In this case the URL or endpoint, because we do not have the route yet.
    """

    def dummy_traces_sampler(sampling_context):
        assert (
            sampling_context["transaction_context"]["name"] == expected_transaction_name
        )
        assert (
            sampling_context["transaction_context"]["source"]
            == expected_transaction_source
        )
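        # Illustration only: a real traces_sampler would end by returning a
        # sample rate (e.g. `return 1.0`); this dummy exists just to assert
        # on the sampling context.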

    sentry_init(
        auto_enabling_integrations=False,  # Make sure that the httpx integration is not added, because it adds tracing information to the Starlette test client's requests.
        integrations=[StarletteIntegration(transaction_style=transaction_style)],
        traces_sampler=dummy_traces_sampler,
        traces_sample_rate=1.0,
    )

    app = fastapi_app_factory()

    client = TestClient(app)
    client.get(request_url)


@pytest.mark.parametrize(
    "request_url,transaction_style,expected_transaction_name,expected_transaction_source",
    [
        (
            "/message/123456",
            "endpoint",
            "starlette.middleware.trustedhost.TrustedHostMiddleware",
            "component",
        ),
        (
            "/message/123456",
            "url",
            "http://testserver/message/123456",
            "url",
        ),
    ],
)
def test_transaction_name_in_middleware(
    sentry_init,
    request_url,
    transaction_style,
    expected_transaction_name,
    expected_transaction_source,
    capture_envelopes,
):
    """
    Tests that the transaction name is something meaningful.
    """
    sentry_init(
        auto_enabling_integrations=False,  # Make sure that the httpx integration is not added, because it adds tracing information to the Starlette test client's requests.
        integrations=[
            StarletteIntegration(transaction_style=transaction_style),
            FastApiIntegration(transaction_style=transaction_style),
        ],
        traces_sample_rate=1.0,
    )

    envelopes = capture_envelopes()

    app = fastapi_app_factory()

    app.add_middleware(
        TrustedHostMiddleware,
        allowed_hosts=[
            "example.com",
        ],
    )

    client = TestClient(app)
    client.get(request_url)

    (transaction_envelope,) = envelopes
    transaction_event = transaction_envelope.get_transaction_event()

    assert transaction_event["contexts"]["response"]["status_code"] == 400
    assert transaction_event["transaction"] == expected_transaction_name
    assert (
        transaction_event["transaction_info"]["source"] == expected_transaction_source
    )


@test_starlette.parametrize_test_configurable_status_codes_deprecated
def test_configurable_status_codes_deprecated(
    sentry_init,
    capture_events,
    failed_request_status_codes,
    status_code,
    expected_error,
):
    with pytest.warns(DeprecationWarning):
        starlette_integration = StarletteIntegration(
            failed_request_status_codes=failed_request_status_codes
        )

    with pytest.warns(DeprecationWarning):
        fast_api_integration = FastApiIntegration(
            failed_request_status_codes=failed_request_status_codes
        )

    sentry_init(
        integrations=[
            starlette_integration,
            fast_api_integration,
        ]
    )

    events = capture_events()

    app = FastAPI()

    @app.get("/error")
    async def _error():
        raise HTTPException(status_code)

    client = TestClient(app)
    client.get("/error")

    if expected_error:
        assert len(events) == 1
    else:
        assert not events


@pytest.mark.skipif(
    FASTAPI_VERSION < (0, 80),
    reason="Requires FastAPI >= 0.80, because earlier versions do not support HTTP 'HEAD' requests",
)
def test_transaction_http_method_default(sentry_init, capture_events):
    """
    By default OPTIONS and HEAD requests do not create a transaction.
    """
    # FastAPI is heavily based on Starlette so we also need
    # to enable StarletteIntegration.
    # In the future this will be auto enabled.
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[
            StarletteIntegration(),
            FastApiIntegration(),
        ],
    )

    app = fastapi_app_factory()

    events = capture_events()

    client = TestClient(app)
    client.get("/nomessage")
    client.options("/nomessage")
    client.head("/nomessage")

    assert len(events) == 1

    (event,) = events

    assert event["request"]["method"] == "GET"


@pytest.mark.skipif(
    FASTAPI_VERSION < (0, 80),
    reason="Requires FastAPI >= 0.80, because earlier versions do not support HTTP 'HEAD' requests",
)
def test_transaction_http_method_custom(sentry_init, capture_events):
    # FastAPI is heavily based on Starlette so we also need
    # to enable StarletteIntegration.
    # In the future this will be auto enabled.
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[
            StarletteIntegration(
                http_methods_to_capture=(
                    "OPTIONS",
                    "head",
                ),  # capitalization does not matter
            ),
            FastApiIntegration(
                http_methods_to_capture=(
                    "OPTIONS",
                    "head",
                ),  # capitalization does not matter
            ),
        ],
    )

    app = fastapi_app_factory()

    events = capture_events()

    client = TestClient(app)
    client.get("/nomessage")
    client.options("/nomessage")
    client.head("/nomessage")

    assert len(events) == 2

    (event1, event2) = events

    assert event1["request"]["method"] == "OPTIONS"
    assert event2["request"]["method"] == "HEAD"


@test_starlette.parametrize_test_configurable_status_codes
def test_configurable_status_codes(
    sentry_init,
    capture_events,
    failed_request_status_codes,
    status_code,
    expected_error,
):
    integration_kwargs = {}
    if failed_request_status_codes is not None:
        integration_kwargs["failed_request_status_codes"] = failed_request_status_codes

    with warnings.catch_warnings():
        warnings.simplefilter("error", DeprecationWarning)
        starlette_integration = StarletteIntegration(**integration_kwargs)
        fastapi_integration = FastApiIntegration(**integration_kwargs)

    sentry_init(integrations=[starlette_integration, fastapi_integration])

    events = capture_events()

    app = FastAPI()

    @app.get("/error")
    async def _error():
        raise HTTPException(status_code)

    client = TestClient(app)
    client.get("/error")

    assert len(events) == int(expected_error)
sentry-python-2.18.0/tests/integrations/flask/000077500000000000000000000000001471214654000214215ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/flask/__init__.py000066400000000000000000000000541471214654000235310ustar00rootroot00000000000000import pytest

pytest.importorskip("flask")
sentry-python-2.18.0/tests/integrations/flask/test_flask.py000066400000000000000000000665121471214654000241440ustar00rootroot00000000000000import json
import re
import logging
from io import BytesIO

import pytest
from flask import (
    Flask,
    Response,
    request,
    abort,
    stream_with_context,
    render_template_string,
)
from flask.views import View
from flask_login import LoginManager, login_user

try:
    from werkzeug.wrappers.request import UnsupportedMediaType
except ImportError:
    UnsupportedMediaType = None

import sentry_sdk
import sentry_sdk.integrations.flask as flask_sentry
from sentry_sdk import (
    set_tag,
    capture_message,
    capture_exception,
)
from sentry_sdk.integrations.logging import LoggingIntegration
from sentry_sdk.serializer import MAX_DATABAG_BREADTH


login_manager = LoginManager()


@pytest.fixture
def app():
    app = Flask(__name__)
    app.config["TESTING"] = True
    app.secret_key = "haha"

    login_manager.init_app(app)

    @app.route("/message")
    def hi():
        capture_message("hi")
        return "ok"

    @app.route("/nomessage")
    def nohi():
        return "ok"

    @app.route("/message/")
    def hi_with_id(message_id):
        capture_message("hi again")
        return "ok"

    return app


@pytest.fixture(params=("auto", "manual"))
def integration_enabled_params(request):
    if request.param == "auto":
        return {"auto_enabling_integrations": True}
    elif request.param == "manual":
        return {"integrations": [flask_sentry.FlaskIntegration()]}
    else:
        raise ValueError(request.param)


def test_has_context(sentry_init, app, capture_events):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
    events = capture_events()

    client = app.test_client()
    response = client.get("/message")
    assert response.status_code == 200

    (event,) = events
    assert event["transaction"] == "hi"
    assert "data" not in event["request"]
    assert event["request"]["url"] == "http://localhost/message"


@pytest.mark.parametrize(
    "url,transaction_style,expected_transaction,expected_source",
    [
        ("/message", "endpoint", "hi", "component"),
        ("/message", "url", "/message", "route"),
        ("/message/123456", "endpoint", "hi_with_id", "component"),
        ("/message/123456", "url", "/message/", "route"),
    ],
)
def test_transaction_style(
    sentry_init,
    app,
    capture_events,
    url,
    transaction_style,
    expected_transaction,
    expected_source,
):
    sentry_init(
        integrations=[
            flask_sentry.FlaskIntegration(transaction_style=transaction_style)
        ]
    )
    events = capture_events()

    client = app.test_client()
    response = client.get(url)
    assert response.status_code == 200

    (event,) = events
    assert event["transaction"] == expected_transaction
    assert event["transaction_info"] == {"source": expected_source}


@pytest.mark.parametrize("debug", (True, False))
@pytest.mark.parametrize("testing", (True, False))
def test_errors(
    sentry_init,
    capture_exceptions,
    capture_events,
    app,
    debug,
    testing,
    integration_enabled_params,
):
    sentry_init(**integration_enabled_params)

    app.debug = debug
    app.testing = testing

    @app.route("/")
    def index():
        1 / 0

    exceptions = capture_exceptions()
    events = capture_events()

    client = app.test_client()
    try:
        client.get("/")
    except ZeroDivisionError:
        pass

    (exc,) = exceptions
    assert isinstance(exc, ZeroDivisionError)

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "flask"


def test_flask_login_not_installed(
    sentry_init, app, capture_events, monkeypatch, integration_enabled_params
):
    sentry_init(**integration_enabled_params)

    monkeypatch.setattr(flask_sentry, "flask_login", None)

    events = capture_events()

    client = app.test_client()
    client.get("/message")

    (event,) = events
    assert event.get("user", {}).get("id") is None


def test_flask_login_not_configured(
    sentry_init, app, capture_events, monkeypatch, integration_enabled_params
):
    sentry_init(**integration_enabled_params)

    assert flask_sentry.flask_login

    events = capture_events()
    client = app.test_client()
    client.get("/message")

    (event,) = events
    assert event.get("user", {}).get("id") is None


def test_flask_login_partially_configured(
    sentry_init, app, capture_events, monkeypatch, integration_enabled_params
):
    sentry_init(**integration_enabled_params)

    events = capture_events()

    login_manager = LoginManager()
    login_manager.init_app(app)

    client = app.test_client()
    client.get("/message")

    (event,) = events
    assert event.get("user", {}).get("id") is None


@pytest.mark.parametrize("send_default_pii", [True, False])
@pytest.mark.parametrize("user_id", [None, "42", 3])
def test_flask_login_configured(
    send_default_pii,
    sentry_init,
    app,
    user_id,
    capture_events,
    monkeypatch,
    integration_enabled_params,
):
    sentry_init(send_default_pii=send_default_pii, **integration_enabled_params)

    class User:
        is_authenticated = is_active = True
        is_anonymous = user_id is not None

        def get_id(self):
            return str(user_id)

    @login_manager.user_loader
    def load_user(user_id):
        if user_id is not None:
            return User()

    @app.route("/login")
    def login():
        if user_id is not None:
            login_user(User())
        return "ok"

    events = capture_events()

    client = app.test_client()
    assert client.get("/login").status_code == 200
    assert not events

    assert client.get("/message").status_code == 200

    (event,) = events
    if user_id is None or not send_default_pii:
        assert event.get("user", {}).get("id") is None
    else:
        assert event["user"]["id"] == str(user_id)


def test_flask_large_json_request(sentry_init, capture_events, app):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])

    data = {"foo": {"bar": "a" * 2000}}

    @app.route("/", methods=["POST"])
    def index():
        assert request.get_json() == data
        assert request.get_data() == json.dumps(data).encode("ascii")
        assert not request.form
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = app.test_client()
    response = client.post("/", content_type="application/json", data=json.dumps(data))
    assert response.status_code == 200

    (event,) = events
    assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
    }
    assert len(event["request"]["data"]["foo"]["bar"]) == 1024


def test_flask_session_tracking(sentry_init, capture_envelopes, app):
    sentry_init(
        integrations=[flask_sentry.FlaskIntegration()],
        release="demo-release",
    )

    @app.route("/")
    def index():
        sentry_sdk.get_isolation_scope().set_user({"ip_address": "1.2.3.4", "id": "42"})
        try:
            raise ValueError("stuff")
        except Exception:
            logging.exception("stuff happened")
        1 / 0

    envelopes = capture_envelopes()

    with app.test_client() as client:
        try:
            client.get("/", headers={"User-Agent": "blafasel/1.0"})
        except ZeroDivisionError:
            pass

    sentry_sdk.get_client().flush()

    (first_event, error_event, session) = envelopes
    first_event = first_event.get_event()
    error_event = error_event.get_event()
    session = session.items[0].payload.json
    aggregates = session["aggregates"]

    assert first_event["exception"]["values"][0]["type"] == "ValueError"
    assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"

    assert len(aggregates) == 1
    assert aggregates[0]["crashed"] == 1
    assert aggregates[0]["started"]
    assert session["attrs"]["release"] == "demo-release"


@pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
def test_flask_empty_json_request(sentry_init, capture_events, app, data):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])

    @app.route("/", methods=["POST"])
    def index():
        assert request.get_json() == data
        assert request.get_data() == json.dumps(data).encode("ascii")
        assert not request.form
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = app.test_client()
    response = client.post("/", content_type="application/json", data=json.dumps(data))
    assert response.status_code == 200

    (event,) = events
    assert event["request"]["data"] == data


def test_flask_medium_formdata_request(sentry_init, capture_events, app):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])

    data = {"foo": "a" * 2000}

    @app.route("/", methods=["POST"])
    def index():
        assert request.form["foo"] == data["foo"]
        assert not request.get_data()
        try:
            assert not request.get_json()
        except UnsupportedMediaType:
            # flask/werkzeug 3
            pass
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = app.test_client()
    response = client.post("/", data=data)
    assert response.status_code == 200

    (event,) = events
    assert event["_meta"]["request"]["data"]["foo"] == {
        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
    }
    assert len(event["request"]["data"]["foo"]) == 1024


def test_flask_formdata_request_appear_transaction_body(
    sentry_init, capture_events, app
):
    """
    Test that the transaction request data contains the request body, even if no exception was raised.
    """
    sentry_init(integrations=[flask_sentry.FlaskIntegration()], traces_sample_rate=1.0)

    data = {"username": "sentry-user", "age": "26"}

    @app.route("/", methods=["POST"])
    def index():
        assert request.form["username"] == data["username"]
        assert request.form["age"] == data["age"]
        assert not request.get_data()
        try:
            assert not request.get_json()
        except UnsupportedMediaType:
            # flask/werkzeug 3
            pass
        set_tag("view", "yes")
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = app.test_client()
    response = client.post("/", data=data)
    assert response.status_code == 200

    event, transaction_event = events

    assert "request" in transaction_event
    assert "data" in transaction_event["request"]
    assert transaction_event["request"]["data"] == data


@pytest.mark.parametrize("input_char", ["a", b"a"])
def test_flask_too_large_raw_request(sentry_init, input_char, capture_events, app):
    sentry_init(
        integrations=[flask_sentry.FlaskIntegration()], max_request_body_size="small"
    )

    data = input_char * 2000

    @app.route("/", methods=["POST"])
    def index():
        assert not request.form
        if isinstance(data, bytes):
            assert request.get_data() == data
        else:
            assert request.get_data() == data.encode("ascii")
        try:
            assert not request.get_json()
        except UnsupportedMediaType:
            # flask/werkzeug 3
            pass
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = app.test_client()
    response = client.post("/", data=data)
    assert response.status_code == 200

    (event,) = events
    assert event["_meta"]["request"]["data"] == {"": {"rem": [["!config", "x"]]}}
    assert not event["request"]["data"]


def test_flask_files_and_form(sentry_init, capture_events, app):
    sentry_init(
        integrations=[flask_sentry.FlaskIntegration()], max_request_body_size="always"
    )

    data = {"foo": "a" * 2000, "file": (BytesIO(b"hello"), "hello.txt")}

    @app.route("/", methods=["POST"])
    def index():
        assert list(request.form) == ["foo"]
        assert list(request.files) == ["file"]
        try:
            assert not request.get_json()
        except UnsupportedMediaType:
            # flask/werkzeug 3
            pass
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = app.test_client()
    response = client.post("/", data=data)
    assert response.status_code == 200

    (event,) = events
    assert event["_meta"]["request"]["data"]["foo"] == {
        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
    }
    assert len(event["request"]["data"]["foo"]) == 1024

    assert event["_meta"]["request"]["data"]["file"] == {"": {"rem": [["!raw", "x"]]}}
    assert not event["request"]["data"]["file"]


def test_json_not_truncated_if_max_request_body_size_is_always(
    sentry_init, capture_events, app
):
    sentry_init(
        integrations=[flask_sentry.FlaskIntegration()], max_request_body_size="always"
    )

    data = {
        "key{}".format(i): "value{}".format(i) for i in range(MAX_DATABAG_BREADTH + 10)
    }

    @app.route("/", methods=["POST"])
    def index():
        assert request.get_json() == data
        assert request.get_data() == json.dumps(data).encode("ascii")
        capture_message("hi")
        return "ok"

    events = capture_events()

    client = app.test_client()
    response = client.post("/", content_type="application/json", data=json.dumps(data))
    assert response.status_code == 200

    (event,) = events
    assert event["request"]["data"] == data


@pytest.mark.parametrize(
    "integrations",
    [
        [flask_sentry.FlaskIntegration()],
        [flask_sentry.FlaskIntegration(), LoggingIntegration(event_level="ERROR")],
    ],
)
def test_errors_not_reported_twice(sentry_init, integrations, capture_events, app):
    sentry_init(integrations=integrations)

    @app.route("/")
    def index():
        try:
            1 / 0
        except Exception as e:
            app.logger.exception(e)
            raise e

    events = capture_events()

    client = app.test_client()
    with pytest.raises(ZeroDivisionError):
        client.get("/")

    assert len(events) == 1


def test_logging(sentry_init, capture_events, app):
    # ensure that Flask's logger magic doesn't break ours
    sentry_init(
        integrations=[
            flask_sentry.FlaskIntegration(),
            LoggingIntegration(event_level="ERROR"),
        ]
    )

    @app.route("/")
    def index():
        app.logger.error("hi")
        return "ok"

    events = capture_events()

    client = app.test_client()
    client.get("/")

    (event,) = events
    assert event["level"] == "error"


def test_no_errors_without_request(app, sentry_init):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
    with app.app_context():
        capture_exception(ValueError())


def test_cli_commands_raise(app):
    if not hasattr(app, "cli"):
        pytest.skip("Too old flask version")

    from flask.cli import ScriptInfo

    @app.cli.command()
    def foo():
        1 / 0

    def create_app(*_):
        return app

    with pytest.raises(ZeroDivisionError):
        app.cli.main(
            args=["foo"], prog_name="myapp", obj=ScriptInfo(create_app=create_app)
        )


def test_wsgi_level_error_is_caught(
    app, capture_exceptions, capture_events, sentry_init
):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])

    def wsgi_app(environ, start_response):
        1 / 0

    app.wsgi_app = wsgi_app

    client = app.test_client()

    exceptions = capture_exceptions()
    events = capture_events()

    with pytest.raises(ZeroDivisionError) as exc:
        client.get("/")

    (error,) = exceptions

    assert error is exc.value

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "wsgi"


def test_500(sentry_init, app):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])

    app.debug = False
    app.testing = False

    @app.route("/")
    def index():
        1 / 0

    @app.errorhandler(500)
    def error_handler(err):
        return "Sentry error."

    client = app.test_client()
    response = client.get("/")

    assert response.data.decode("utf-8") == "Sentry error."


def test_error_in_errorhandler(sentry_init, capture_events, app):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])

    app.debug = False
    app.testing = False

    @app.route("/")
    def index():
        raise ValueError()

    @app.errorhandler(500)
    def error_handler(err):
        1 / 0

    events = capture_events()

    client = app.test_client()

    with pytest.raises(ZeroDivisionError):
        client.get("/")

    event1, event2 = events

    (exception,) = event1["exception"]["values"]
    assert exception["type"] == "ValueError"

    exception = event2["exception"]["values"][-1]
    assert exception["type"] == "ZeroDivisionError"


def test_bad_request_not_captured(sentry_init, capture_events, app):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
    events = capture_events()

    @app.route("/")
    def index():
        abort(400)

    client = app.test_client()

    client.get("/")

    assert not events


def test_does_not_leak_scope(sentry_init, capture_events, app):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
    events = capture_events()

    sentry_sdk.get_isolation_scope().set_tag("request_data", False)

    @app.route("/")
    def index():
        sentry_sdk.get_isolation_scope().set_tag("request_data", True)

        def generate():
            for row in range(1000):
                assert sentry_sdk.get_isolation_scope()._tags["request_data"]

                yield str(row) + "\n"

        return Response(stream_with_context(generate()), mimetype="text/csv")

    client = app.test_client()
    response = client.get("/")
    assert response.data.decode() == "".join(str(row) + "\n" for row in range(1000))
    assert not events

    assert not sentry_sdk.get_isolation_scope()._tags["request_data"]


def test_scoped_test_client(sentry_init, app):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])

    @app.route("/")
    def index():
        return "ok"

    with app.test_client() as client:
        response = client.get("/")
        assert response.status_code == 200


@pytest.mark.parametrize("exc_cls", [ZeroDivisionError, Exception])
def test_errorhandler_for_exception_swallows_exception(
    sentry_init, app, capture_events, exc_cls
):
    # In contrast to error handlers for a status code, error
    # handlers for exceptions can swallow the exception (this is
    # just how the Flask signal works)
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
    events = capture_events()

    @app.route("/")
    def index():
        1 / 0

    @app.errorhandler(exc_cls)
    def zerodivision(e):
        return "ok"

    with app.test_client() as client:
        response = client.get("/")
        assert response.status_code == 200

    assert not events


def test_tracing_success(sentry_init, capture_events, app):
    sentry_init(traces_sample_rate=1.0, integrations=[flask_sentry.FlaskIntegration()])

    @app.before_request
    def _():
        set_tag("before_request", "yes")

    @app.route("/message_tx")
    def hi_tx():
        set_tag("view", "yes")
        capture_message("hi")
        return "ok"

    events = capture_events()

    with app.test_client() as client:
        response = client.get("/message_tx")
        assert response.status_code == 200

    message_event, transaction_event = events

    assert transaction_event["type"] == "transaction"
    assert transaction_event["transaction"] == "hi_tx"
    assert transaction_event["contexts"]["trace"]["status"] == "ok"
    assert transaction_event["tags"]["view"] == "yes"
    assert transaction_event["tags"]["before_request"] == "yes"

    assert message_event["message"] == "hi"
    assert message_event["transaction"] == "hi_tx"
    assert message_event["tags"]["view"] == "yes"
    assert message_event["tags"]["before_request"] == "yes"


def test_tracing_error(sentry_init, capture_events, app):
    sentry_init(traces_sample_rate=1.0, integrations=[flask_sentry.FlaskIntegration()])

    events = capture_events()

    @app.route("/error")
    def error():
        1 / 0

    with pytest.raises(ZeroDivisionError):
        with app.test_client() as client:
            response = client.get("/error")
            assert response.status_code == 500

    error_event, transaction_event = events

    assert transaction_event["type"] == "transaction"
    assert transaction_event["transaction"] == "error"
    assert transaction_event["contexts"]["trace"]["status"] == "internal_error"

    assert error_event["transaction"] == "error"
    (exception,) = error_event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"


def test_error_has_trace_context_if_tracing_disabled(sentry_init, capture_events, app):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])

    events = capture_events()

    @app.route("/error")
    def error():
        1 / 0

    with pytest.raises(ZeroDivisionError):
        with app.test_client() as client:
            response = client.get("/error")
            assert response.status_code == 500

    (error_event,) = events

    assert error_event["contexts"]["trace"]


def test_class_based_views(sentry_init, app, capture_events):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
    events = capture_events()

    @app.route("/")
    class HelloClass(View):
        def dispatch_request(self):
            capture_message("hi")
            return "ok"

    app.add_url_rule("/hello-class/", view_func=HelloClass.as_view("hello_class"))

    with app.test_client() as client:
        response = client.get("/hello-class/")
        assert response.status_code == 200

    (event,) = events

    assert event["message"] == "hi"
    assert event["transaction"] == "hello_class"


@pytest.mark.parametrize(
    "template_string", ["{{ sentry_trace }}", "{{ sentry_trace_meta }}"]
)
def test_template_tracing_meta(sentry_init, app, capture_events, template_string):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
    events = capture_events()

    @app.route("/")
    def index():
        capture_message(sentry_sdk.get_traceparent() + "\n" + sentry_sdk.get_baggage())
        return render_template_string(template_string)

    with app.test_client() as client:
        response = client.get("/")
        assert response.status_code == 200

        rendered_meta = response.data.decode("utf-8")
        traceparent, baggage = events[0]["message"].split("\n")
        assert traceparent != ""
        assert baggage != ""

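    # The template tags render <meta name="sentry-trace"> and <meta name="baggage">
    # tags; extract their content attributes and compare them to the SDK values.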
    match = re.match(
        r'^<meta name="sentry-trace" content="([^\"]*)">\s*<meta name="baggage" content="([^\"]*)">',
        rendered_meta,
    )
    assert match is not None
    assert match.group(1) == traceparent

    rendered_baggage = match.group(2)
    assert rendered_baggage == baggage


def test_dont_override_sentry_trace_context(sentry_init, app):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])

    @app.route("/")
    def index():
        return render_template_string("{{ sentry_trace }}", sentry_trace="hi")

    with app.test_client() as client:
        response = client.get("/")
        assert response.status_code == 200
        assert response.data == b"hi"


def test_request_not_modified_by_reference(sentry_init, capture_events, app):
    sentry_init(integrations=[flask_sentry.FlaskIntegration()])

    @app.route("/", methods=["POST"])
    def index():
        logging.critical("oops")
        assert request.get_json() == {"password": "ohno"}
        assert request.headers["Authorization"] == "Bearer ohno"
        return "ok"

    events = capture_events()

    client = app.test_client()
    client.post(
        "/", json={"password": "ohno"}, headers={"Authorization": "Bearer ohno"}
    )

    (event,) = events

    assert event["request"]["data"]["password"] == "[Filtered]"
    assert event["request"]["headers"]["Authorization"] == "[Filtered]"


def test_response_status_code_ok_in_transaction_context(
    sentry_init, capture_envelopes, app
):
    """
    Tests that the response status code is added to the transaction context.
    This also works for when there is an Exception during the request, but somehow the test flask app doesn't seem to trigger that.
    """
    sentry_init(
        integrations=[flask_sentry.FlaskIntegration()],
        traces_sample_rate=1.0,
        release="demo-release",
    )

    envelopes = capture_envelopes()

    client = app.test_client()
    client.get("/message")

    sentry_sdk.get_client().flush()

    (_, transaction_envelope, _) = envelopes
    transaction = transaction_envelope.get_transaction_event()

    assert transaction["type"] == "transaction"
    assert len(transaction["contexts"]) > 0
    assert (
        "response" in transaction["contexts"].keys()
    ), "Response context not found in transaction"
    assert transaction["contexts"]["response"]["status_code"] == 200


def test_response_status_code_not_found_in_transaction_context(
    sentry_init, capture_envelopes, app
):
    sentry_init(
        integrations=[flask_sentry.FlaskIntegration()],
        traces_sample_rate=1.0,
        release="demo-release",
    )

    envelopes = capture_envelopes()

    client = app.test_client()
    client.get("/not-existing-route")

    sentry_sdk.get_client().flush()

    (transaction_envelope, _) = envelopes
    transaction = transaction_envelope.get_transaction_event()

    assert transaction["type"] == "transaction"
    assert len(transaction["contexts"]) > 0
    assert (
        "response" in transaction["contexts"].keys()
    ), "Response context not found in transaction"
    assert transaction["contexts"]["response"]["status_code"] == 404


def test_span_origin(sentry_init, app, capture_events):
    sentry_init(
        integrations=[flask_sentry.FlaskIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client = app.test_client()
    client.get("/message")

    (_, event) = events

    assert event["contexts"]["trace"]["origin"] == "auto.http.flask"


def test_transaction_http_method_default(
    sentry_init,
    app,
    capture_events,
):
    """
    By default OPTIONS and HEAD requests do not create a transaction.
    """
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[flask_sentry.FlaskIntegration()],
    )
    events = capture_events()

    client = app.test_client()
    response = client.get("/nomessage")
    assert response.status_code == 200

    response = client.options("/nomessage")
    assert response.status_code == 200

    response = client.head("/nomessage")
    assert response.status_code == 200

    (event,) = events

    assert len(events) == 1
    assert event["request"]["method"] == "GET"


def test_transaction_http_method_custom(
    sentry_init,
    app,
    capture_events,
):
    """
    Configure FlaskIntegration to ONLY capture OPTIONS and HEAD requests.
    """
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[
            flask_sentry.FlaskIntegration(
                http_methods_to_capture=(
                    "OPTIONS",
                    "head",
                )  # capitalization does not matter
            )
        ],
    )
    events = capture_events()

    client = app.test_client()
    response = client.get("/nomessage")
    assert response.status_code == 200

    response = client.options("/nomessage")
    assert response.status_code == 200

    response = client.head("/nomessage")
    assert response.status_code == 200

    assert len(events) == 2

    (event1, event2) = events
    assert event1["request"]["method"] == "OPTIONS"
    assert event2["request"]["method"] == "HEAD"
sentry-python-2.18.0/tests/integrations/gcp/test_gcp.py
"""
# GCP Cloud Functions unit tests

"""

import json
from textwrap import dedent
import tempfile
import sys
import subprocess

import pytest
import os.path
import os


FUNCTIONS_PRELUDE = """
from unittest.mock import Mock
import __main__ as gcp_functions
import os

# Initializing all the necessary environment variables
os.environ["FUNCTION_TIMEOUT_SEC"] = "3"
os.environ["FUNCTION_NAME"] = "Google Cloud function"
os.environ["ENTRY_POINT"] = "cloud_function"
os.environ["FUNCTION_IDENTITY"] = "func_ID"
os.environ["FUNCTION_REGION"] = "us-central1"
os.environ["GCP_PROJECT"] = "serverless_project"

def log_return_value(func):
    def inner(*args, **kwargs):
        rv = func(*args, **kwargs)

        print("\\nRETURN VALUE: {}\\n".format(json.dumps(rv)))

        return rv

    return inner

gcp_functions.worker_v1 = Mock()
gcp_functions.worker_v1.FunctionHandler = Mock()
gcp_functions.worker_v1.FunctionHandler.invoke_user_function = log_return_value(cloud_function)


import sentry_sdk
from sentry_sdk.integrations.gcp import GcpIntegration
import json
import time

from sentry_sdk.transport import HttpTransport

def event_processor(event):
    # Adding delay which would allow us to capture events.
    time.sleep(1)
    return event

def envelope_processor(envelope):
    (item,) = envelope.items
    return item.get_bytes()

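# Prints each envelope item to stdout so the parent test process can parse it.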
class TestTransport(HttpTransport):
    def capture_envelope(self, envelope):
        envelope_item = envelope_processor(envelope)
        print("\\nENVELOPE: {}\\n".format(envelope_item.decode(\"utf-8\")))


def init_sdk(timeout_warning=False, **extra_init_args):
    sentry_sdk.init(
        dsn="https://123abc@example.com/123",
        transport=TestTransport,
        integrations=[GcpIntegration(timeout_warning=timeout_warning)],
        shutdown_timeout=10,
        # excepthook -> dedupe -> event_processor client report gets added
        # which we don't really care about for these tests
        send_client_reports=False,
        **extra_init_args
    )

"""


@pytest.fixture
def run_cloud_function():
    def inner(code, subprocess_kwargs=()):
        envelope_items = []
        return_value = None

        # STEP: Package the cloud function code as an sdist and install it locally

        subprocess_kwargs = dict(subprocess_kwargs)

        with tempfile.TemporaryDirectory() as tmpdir:
            main_py = os.path.join(tmpdir, "main.py")
            with open(main_py, "w") as f:
                f.write(code)

            setup_cfg = os.path.join(tmpdir, "setup.cfg")

            with open(setup_cfg, "w") as f:
                f.write("[install]\nprefix=")

            subprocess.check_call(
                [sys.executable, "setup.py", "sdist", "-d", os.path.join(tmpdir, "..")],
                **subprocess_kwargs
            )

            subprocess.check_call(
                "pip install ../*.tar.gz -t .",
                cwd=tmpdir,
                shell=True,
                **subprocess_kwargs
            )

            stream = os.popen("python {}/main.py".format(tmpdir))
            stream_data = stream.read()

            stream.close()

            for line in stream_data.splitlines():
                print("GCP:", line)
                if line.startswith("ENVELOPE: "):
                    line = line[len("ENVELOPE: ") :]
                    envelope_items.append(json.loads(line))
                elif line.startswith("RETURN VALUE: "):
                    line = line[len("RETURN VALUE: ") :]
                    return_value = json.loads(line)
                else:
                    continue

        return envelope_items, return_value

    return inner


def test_handled_exception(run_cloud_function):
    envelope_items, return_value = run_cloud_function(
        dedent(
            """
        functionhandler = None
        event = {}
        def cloud_function(functionhandler, event):
            raise Exception("something went wrong")
        """
        )
        + FUNCTIONS_PRELUDE
        + dedent(
            """
        init_sdk(timeout_warning=False)
        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
        """
        )
    )
    assert envelope_items[0]["level"] == "error"
    (exception,) = envelope_items[0]["exception"]["values"]

    assert exception["type"] == "Exception"
    assert exception["value"] == "something went wrong"
    assert exception["mechanism"]["type"] == "gcp"
    assert not exception["mechanism"]["handled"]


def test_unhandled_exception(run_cloud_function):
    envelope_items, _ = run_cloud_function(
        dedent(
            """
        functionhandler = None
        event = {}
        def cloud_function(functionhandler, event):
            x = 3/0
            return "3"
        """
        )
        + FUNCTIONS_PRELUDE
        + dedent(
            """
        init_sdk(timeout_warning=False)
        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
        """
        )
    )
    assert envelope_items[0]["level"] == "error"
    (exception,) = envelope_items[0]["exception"]["values"]

    assert exception["type"] == "ZeroDivisionError"
    assert exception["value"] == "division by zero"
    assert exception["mechanism"]["type"] == "gcp"
    assert not exception["mechanism"]["handled"]


def test_timeout_error(run_cloud_function):
    envelope_items, _ = run_cloud_function(
        dedent(
            """
        functionhandler = None
        event = {}
        def cloud_function(functionhandler, event):
            time.sleep(10)
            return "3"
        """
        )
        + FUNCTIONS_PRELUDE
        + dedent(
            """
        init_sdk(timeout_warning=True)
        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
        """
        )
    )
    assert envelope_items[0]["level"] == "error"
    (exception,) = envelope_items[0]["exception"]["values"]

    assert exception["type"] == "ServerlessTimeoutWarning"
    assert (
        exception["value"]
        == "WARNING : Function is expected to get timed out. Configured timeout duration = 3 seconds."
    )
    assert exception["mechanism"]["type"] == "threading"
    assert not exception["mechanism"]["handled"]


def test_performance_no_error(run_cloud_function):
    envelope_items, _ = run_cloud_function(
        dedent(
            """
        functionhandler = None
        event = {}
        def cloud_function(functionhandler, event):
            return "test_string"
        """
        )
        + FUNCTIONS_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=1.0)
        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
        """
        )
    )

    assert envelope_items[0]["type"] == "transaction"
    assert envelope_items[0]["contexts"]["trace"]["op"] == "function.gcp"
    assert envelope_items[0]["transaction"].startswith("Google Cloud function")
    assert envelope_items[0]["transaction_info"] == {"source": "component"}
    assert envelope_items[0]["transaction"] in envelope_items[0]["request"]["url"]


def test_performance_error(run_cloud_function):
    envelope_items, _ = run_cloud_function(
        dedent(
            """
        functionhandler = None
        event = {}
        def cloud_function(functionhandler, event):
            raise Exception("something went wrong")
        """
        )
        + FUNCTIONS_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=1.0)
        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
        """
        )
    )

    assert envelope_items[0]["level"] == "error"
    (exception,) = envelope_items[0]["exception"]["values"]

    assert exception["type"] == "Exception"
    assert exception["value"] == "something went wrong"
    assert exception["mechanism"]["type"] == "gcp"
    assert not exception["mechanism"]["handled"]

    assert envelope_items[1]["type"] == "transaction"
    assert envelope_items[1]["contexts"]["trace"]["op"] == "function.gcp"
    assert envelope_items[1]["transaction"].startswith("Google Cloud function")
    assert envelope_items[1]["transaction"] in envelope_items[0]["request"]["url"]


def test_traces_sampler_gets_correct_values_in_sampling_context(
    run_cloud_function, DictionaryContaining  # noqa:N803
):
    # TODO: There are some decent sized hacks below. For more context, see the
    # long comment in the test of the same name in the AWS integration. The
    # situations there and here aren't identical, but they're similar enough
    # that solving one would probably solve both.

    import inspect

    _, return_value = run_cloud_function(
        dedent(
            """
            functionhandler = None
            event = {
                "type": "chase",
                "chasers": ["Maisey", "Charlie"],
                "num_squirrels": 2,
            }
            def cloud_function(functionhandler, event):
                # this runs after the transaction has started, which means we
                # can make assertions about traces_sampler
                try:
                    traces_sampler.assert_any_call(
                        DictionaryContaining({
                            "gcp_env": DictionaryContaining({
                                "function_name": "chase_into_tree",
                                "function_region": "dogpark",
                                "function_project": "SquirrelChasing",
                            }),
                            "gcp_event": {
                                "type": "chase",
                                "chasers": ["Maisey", "Charlie"],
                                "num_squirrels": 2,
                            },
                        })
                    )
                except AssertionError:
                    # catch the error and return it because the error itself will
                    # get swallowed by the SDK as an "internal exception"
                    return {"AssertionError raised": True,}

                return {"AssertionError raised": False,}
            """
        )
        + FUNCTIONS_PRELUDE
        + dedent(inspect.getsource(DictionaryContaining))
        + dedent(
            """
            os.environ["FUNCTION_NAME"] = "chase_into_tree"
            os.environ["FUNCTION_REGION"] = "dogpark"
            os.environ["GCP_PROJECT"] = "SquirrelChasing"

            def _safe_is_equal(x, y):
                # copied from conftest.py - see docstring and comments there
                try:
                    is_equal = x.__eq__(y)
                except AttributeError:
                    is_equal = NotImplemented

                if is_equal == NotImplemented:
                    return x == y

                return is_equal

            traces_sampler = Mock(return_value=True)

            init_sdk(
                traces_sampler=traces_sampler,
            )

            gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
            """
        )
    )

    assert return_value["AssertionError raised"] is False


def test_error_has_new_trace_context_performance_enabled(run_cloud_function):
    """
    Check if a 'trace' context is added to errors and transactions when performance monitoring is enabled.
    """
    envelope_items, _ = run_cloud_function(
        dedent(
            """
        functionhandler = None
        event = {}
        def cloud_function(functionhandler, event):
            sentry_sdk.capture_message("hi")
            x = 3/0
            return "3"
        """
        )
        + FUNCTIONS_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=1.0)
        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
        """
        )
    )
    (msg_event, error_event, transaction_event) = envelope_items

    assert "trace" in msg_event["contexts"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert "trace" in error_event["contexts"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert "trace" in transaction_event["contexts"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
        == transaction_event["contexts"]["trace"]["trace_id"]
    )


def test_error_has_new_trace_context_performance_disabled(run_cloud_function):
    """
    Check if a 'trace' context is added to errors and transactions when performance monitoring is disabled.
    """
    envelope_items, _ = run_cloud_function(
        dedent(
            """
        functionhandler = None
        event = {}
        def cloud_function(functionhandler, event):
            sentry_sdk.capture_message("hi")
            x = 3/0
            return "3"
        """
        )
        + FUNCTIONS_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=None)  # this is the default, just added for clarity
        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
        """
        )
    )

    (msg_event, error_event) = envelope_items

    assert "trace" in msg_event["contexts"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert "trace" in error_event["contexts"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
    )


def test_error_has_existing_trace_context_performance_enabled(run_cloud_function):
    """
    Check if a 'trace' context is added to errors and transactions
    from the incoming 'sentry-trace' header when performance monitoring is enabled.
    """
    trace_id = "471a43a4192642f0b136d5159a501701"
    parent_span_id = "6e8f22c393e68f19"
    parent_sampled = 1
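    # sentry-trace header format: "<trace_id>-<parent_span_id>-<sampled>"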
    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)

    envelope_items, _ = run_cloud_function(
        dedent(
            """
        functionhandler = None

        from collections import namedtuple
        GCPEvent = namedtuple("GCPEvent", ["headers"])
        event = GCPEvent(headers={"sentry-trace": "%s"})

        def cloud_function(functionhandler, event):
            sentry_sdk.capture_message("hi")
            x = 3/0
            return "3"
        """
            % sentry_trace_header
        )
        + FUNCTIONS_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=1.0)
        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
        """
        )
    )
    (msg_event, error_event, transaction_event) = envelope_items

    assert "trace" in msg_event["contexts"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert "trace" in error_event["contexts"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert "trace" in transaction_event["contexts"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
        == transaction_event["contexts"]["trace"]["trace_id"]
        == "471a43a4192642f0b136d5159a501701"
    )


def test_error_has_existing_trace_context_performance_disabled(run_cloud_function):
    """
    Check if a 'trace' context is added to errors and transactions
    from the incoming 'sentry-trace' header when performance monitoring is disabled.
    """
    trace_id = "471a43a4192642f0b136d5159a501701"
    parent_span_id = "6e8f22c393e68f19"
    parent_sampled = 1
    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)

    envelope_items, _ = run_cloud_function(
        dedent(
            """
        functionhandler = None

        from collections import namedtuple
        GCPEvent = namedtuple("GCPEvent", ["headers"])
        event = GCPEvent(headers={"sentry-trace": "%s"})

        def cloud_function(functionhandler, event):
            sentry_sdk.capture_message("hi")
            x = 3/0
            return "3"
        """
            % sentry_trace_header
        )
        + FUNCTIONS_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=None)  # this is the default, just added for clarity
        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
        """
        )
    )
    (msg_event, error_event) = envelope_items

    assert "trace" in msg_event["contexts"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert "trace" in error_event["contexts"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
        == "471a43a4192642f0b136d5159a501701"
    )


def test_span_origin(run_cloud_function):
    events, _ = run_cloud_function(
        dedent(
            """
        functionhandler = None
        event = {}
        def cloud_function(functionhandler, event):
            return "test_string"
        """
        )
        + FUNCTIONS_PRELUDE
        + dedent(
            """
        init_sdk(traces_sample_rate=1.0)
        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
        """
        )
    )

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "auto.function.gcp"
sentry-python-2.18.0/tests/integrations/gql/__init__.py
import pytest

pytest.importorskip("gql")
sentry-python-2.18.0/tests/integrations/gql/test_gql.py
import pytest

import responses
from gql import gql
from gql import Client
from gql.transport.exceptions import TransportQueryError
from gql.transport.requests import RequestsHTTPTransport
from sentry_sdk.integrations.gql import GQLIntegration


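# responses.activate intercepts the HTTP request made by the gql transport,
# so no real GraphQL server is needed.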
@responses.activate
def _execute_mock_query(response_json):
    url = "http://example.com/graphql"
    query_string = """
        query Example {
            example
        }
    """

    # Mock the GraphQL server response
    responses.add(
        method=responses.POST,
        url=url,
        json=response_json,
        status=200,
    )

    transport = RequestsHTTPTransport(url=url)
    client = Client(transport=transport)
    query = gql(query_string)

    return client.execute(query)


def _make_erroneous_query(capture_events):
    """
    Make an erroneous GraphQL query, and assert that the error was reraised, that
    exactly one event was recorded, and that the exception recorded was a
    TransportQueryError. Then, return the event to allow further verifications.
    """
    events = capture_events()
    response_json = {"errors": ["something bad happened"]}

    with pytest.raises(TransportQueryError):
        _execute_mock_query(response_json)

    assert (
        len(events) == 1
    ), "the sdk captured %d events, but 1 event was expected" % len(events)

    (event,) = events
    (exception,) = event["exception"]["values"]

    assert (
        exception["type"] == "TransportQueryError"
    ), "%s was captured, but we expected a TransportQueryError" % exception(type)

    assert "request" in event

    return event


def test_gql_init(sentry_init):
    """
    Integration test to ensure we can initialize the SDK with the GQL Integration
    """
    sentry_init(integrations=[GQLIntegration()])


def test_real_gql_request_no_error(sentry_init, capture_events):
    """
    Integration test verifying that the GQLIntegration works as expected with successful query.
    """
    sentry_init(integrations=[GQLIntegration()])
    events = capture_events()

    response_data = {"example": "This is the example"}
    response_json = {"data": response_data}

    result = _execute_mock_query(response_json)

    assert (
        result == response_data
    ), "client.execute returned a different value from what it received from the server"
    assert (
        len(events) == 0
    ), "the sdk captured an event, even though the query was successful"


def test_real_gql_request_with_error_no_pii(sentry_init, capture_events):
    """
    Integration test verifying that the GQLIntegration works as expected with a query
    resulting in a GraphQL error, and that PII is not sent.
    """
    sentry_init(integrations=[GQLIntegration()])

    event = _make_erroneous_query(capture_events)

    assert "data" not in event["request"]
    assert "response" not in event["contexts"]


def test_real_gql_request_with_error_with_pii(sentry_init, capture_events):
    """
    Integration test verifying that the GQLIntegration works as expected with a query
    resulting in a GraphQL error, and that PII is sent when send_default_pii is enabled.
    """
    sentry_init(integrations=[GQLIntegration()], send_default_pii=True)

    event = _make_erroneous_query(capture_events)

    assert "data" in event["request"]
    assert "response" in event["contexts"]
sentry-python-2.18.0/tests/integrations/graphene/__init__.py
import pytest

pytest.importorskip("graphene")
pytest.importorskip("fastapi")
pytest.importorskip("flask")
sentry-python-2.18.0/tests/integrations/graphene/test_graphene.py
from fastapi import FastAPI, Request
from fastapi.testclient import TestClient
from flask import Flask, request, jsonify
from graphene import ObjectType, String, Schema

from sentry_sdk.consts import OP
from sentry_sdk.integrations.fastapi import FastApiIntegration
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.graphene import GrapheneIntegration
from sentry_sdk.integrations.starlette import StarletteIntegration


class Query(ObjectType):
    hello = String(first_name=String(default_value="stranger"))
    goodbye = String()

    def resolve_hello(root, info, first_name):  # noqa: N805
        return "Hello {}!".format(first_name)

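    # Intentionally failing resolver used by the error-capture tests below.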
    def resolve_goodbye(root, info):  # noqa: N805
        raise RuntimeError("oh no!")


def test_capture_request_if_available_and_send_pii_is_on_async(
    sentry_init, capture_events
):
    sentry_init(
        send_default_pii=True,
        integrations=[
            GrapheneIntegration(),
            FastApiIntegration(),
            StarletteIntegration(),
        ],
    )
    events = capture_events()

    schema = Schema(query=Query)

    async_app = FastAPI()

    @async_app.post("/graphql")
    async def graphql_server_async(request: Request):
        data = await request.json()
        result = await schema.execute_async(data["query"])
        return result.data

    query = {"query": "query ErrorQuery {goodbye}"}
    client = TestClient(async_app)
    client.post("/graphql", json=query)

    assert len(events) == 1

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "graphene"
    assert event["request"]["api_target"] == "graphql"
    assert event["request"]["data"] == query


def test_capture_request_if_available_and_send_pii_is_on_sync(
    sentry_init, capture_events
):
    sentry_init(
        send_default_pii=True,
        integrations=[GrapheneIntegration(), FlaskIntegration()],
    )
    events = capture_events()

    schema = Schema(query=Query)

    sync_app = Flask(__name__)

    @sync_app.route("/graphql", methods=["POST"])
    def graphql_server_sync():
        data = request.get_json()
        result = schema.execute(data["query"])
        return jsonify(result.data), 200

    query = {"query": "query ErrorQuery {goodbye}"}
    client = sync_app.test_client()
    client.post("/graphql", json=query)

    assert len(events) == 1

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "graphene"
    assert event["request"]["api_target"] == "graphql"
    assert event["request"]["data"] == query


def test_do_not_capture_request_if_send_pii_is_off_async(sentry_init, capture_events):
    sentry_init(
        integrations=[
            GrapheneIntegration(),
            FastApiIntegration(),
            StarletteIntegration(),
        ],
    )
    events = capture_events()

    schema = Schema(query=Query)

    async_app = FastAPI()

    @async_app.post("/graphql")
    async def graphql_server_async(request: Request):
        data = await request.json()
        result = await schema.execute_async(data["query"])
        return result.data

    query = {"query": "query ErrorQuery {goodbye}"}
    client = TestClient(async_app)
    client.post("/graphql", json=query)

    assert len(events) == 1

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "graphene"
    assert "data" not in event["request"]
    assert "response" not in event["contexts"]


def test_do_not_capture_request_if_send_pii_is_off_sync(sentry_init, capture_events):
    sentry_init(
        integrations=[GrapheneIntegration(), FlaskIntegration()],
    )
    events = capture_events()

    schema = Schema(query=Query)

    sync_app = Flask(__name__)

    @sync_app.route("/graphql", methods=["POST"])
    def graphql_server_sync():
        data = request.get_json()
        result = schema.execute(data["query"])
        return jsonify(result.data), 200

    query = {"query": "query ErrorQuery {goodbye}"}
    client = sync_app.test_client()
    client.post("/graphql", json=query)

    assert len(events) == 1

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "graphene"
    assert "data" not in event["request"]
    assert "response" not in event["contexts"]


def test_no_event_if_no_errors_async(sentry_init, capture_events):
    sentry_init(
        integrations=[
            GrapheneIntegration(),
            FastApiIntegration(),
            StarletteIntegration(),
        ],
    )
    events = capture_events()

    schema = Schema(query=Query)

    async_app = FastAPI()

    @async_app.post("/graphql")
    async def graphql_server_async(request: Request):
        data = await request.json()
        result = await schema.execute_async(data["query"])
        return result.data

    query = {
        "query": "query GreetingQuery { hello }",
    }
    client = TestClient(async_app)
    client.post("/graphql", json=query)

    assert len(events) == 0


def test_no_event_if_no_errors_sync(sentry_init, capture_events):
    sentry_init(
        integrations=[
            GrapheneIntegration(),
            FlaskIntegration(),
        ],
    )
    events = capture_events()

    schema = Schema(query=Query)

    sync_app = Flask(__name__)

    @sync_app.route("/graphql", methods=["POST"])
    def graphql_server_sync():
        data = request.get_json()
        result = schema.execute(data["query"])
        return jsonify(result.data), 200

    query = {
        "query": "query GreetingQuery { hello }",
    }
    client = sync_app.test_client()
    client.post("/graphql", json=query)

    assert len(events) == 0


def test_graphql_span_holds_query_information(sentry_init, capture_events):
    sentry_init(
        integrations=[GrapheneIntegration(), FlaskIntegration()],
        enable_tracing=True,
        default_integrations=False,
    )
    events = capture_events()

    schema = Schema(query=Query)

    sync_app = Flask(__name__)

    @sync_app.route("/graphql", methods=["POST"])
    def graphql_server_sync():
        data = request.get_json()
        result = schema.execute(data["query"], operation_name=data.get("operationName"))
        return jsonify(result.data), 200

    query = {
        "query": "query GreetingQuery { hello }",
        "operationName": "GreetingQuery",
    }
    client = sync_app.test_client()
    client.post("/graphql", json=query)

    assert len(events) == 1

    (event,) = events
    assert len(event["spans"]) == 1

    (span,) = event["spans"]
    assert span["op"] == OP.GRAPHQL_QUERY
    assert span["description"] == query["operationName"]
    assert span["data"]["graphql.document"] == query["query"]
    assert span["data"]["graphql.operation.name"] == query["operationName"]
    assert span["data"]["graphql.operation.type"] == "query"


def test_breadcrumbs_hold_query_information_on_error(sentry_init, capture_events):
    sentry_init(
        integrations=[
            GrapheneIntegration(),
        ],
        default_integrations=False,
    )
    events = capture_events()

    schema = Schema(query=Query)

    sync_app = Flask(__name__)

    @sync_app.route("/graphql", methods=["POST"])
    def graphql_server_sync():
        data = request.get_json()
        result = schema.execute(data["query"], operation_name=data.get("operationName"))
        return jsonify(result.data), 200

    query = {
        "query": "query ErrorQuery { goodbye }",
        "operationName": "ErrorQuery",
    }
    client = sync_app.test_client()
    client.post("/graphql", json=query)

    assert len(events) == 1

    (event,) = events
    assert len(event["breadcrumbs"]) == 1

    breadcrumbs = event["breadcrumbs"]["values"]
    assert len(breadcrumbs) == 1

    (breadcrumb,) = breadcrumbs
    assert breadcrumb["category"] == "graphql.operation"
    assert breadcrumb["data"]["operation_name"] == query["operationName"]
    assert breadcrumb["data"]["operation_type"] == "query"
    assert breadcrumb["type"] == "default"
sentry-python-2.18.0/tests/integrations/grpc/__init__.py
import sys
from pathlib import Path

import pytest

# For imports inside gRPC autogenerated code to work
sys.path.append(str(Path(__file__).parent))
pytest.importorskip("grpc")
sentry-python-2.18.0/tests/integrations/grpc/compile_test_services.sh
#!/usr/bin/env bash

# Run this script from the project root to generate the python code

TARGET_PATH=./tests/integrations/grpc

# Create python file
python -m grpc_tools.protoc \
    --proto_path=$TARGET_PATH/protos/ \
    --python_out=$TARGET_PATH/ \
    --pyi_out=$TARGET_PATH/ \
    --grpc_python_out=$TARGET_PATH/ \
    $TARGET_PATH/protos/grpc_test_service.proto

echo Code generation successful
sentry-python-2.18.0/tests/integrations/grpc/grpc_test_service_pb2.py
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler.  DO NOT EDIT!
# source: grpc_test_service.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17grpc_test_service.proto\x12\x10grpc_test_server\"\x1f\n\x0fgRPCTestMessage\x12\x0c\n\x04text\x18\x01 \x01(\t2\xf8\x02\n\x0fgRPCTestService\x12Q\n\tTestServe\x12!.grpc_test_server.gRPCTestMessage\x1a!.grpc_test_server.gRPCTestMessage\x12Y\n\x0fTestUnaryStream\x12!.grpc_test_server.gRPCTestMessage\x1a!.grpc_test_server.gRPCTestMessage0\x01\x12\\\n\x10TestStreamStream\x12!.grpc_test_server.gRPCTestMessage\x1a!.grpc_test_server.gRPCTestMessage(\x01\x30\x01\x12Y\n\x0fTestStreamUnary\x12!.grpc_test_server.gRPCTestMessage\x1a!.grpc_test_server.gRPCTestMessage(\x01\x62\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'grpc_test_service_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
  DESCRIPTOR._options = None
  _globals['_GRPCTESTMESSAGE']._serialized_start=45
  _globals['_GRPCTESTMESSAGE']._serialized_end=76
  _globals['_GRPCTESTSERVICE']._serialized_start=79
  _globals['_GRPCTESTSERVICE']._serialized_end=455
# @@protoc_insertion_point(module_scope)
sentry-python-2.18.0/tests/integrations/grpc/grpc_test_service_pb2.pyi
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from typing import ClassVar as _ClassVar, Optional as _Optional

DESCRIPTOR: _descriptor.FileDescriptor

class gRPCTestMessage(_message.Message):
    __slots__ = ["text"]
    TEXT_FIELD_NUMBER: _ClassVar[int]
    text: str
    def __init__(self, text: _Optional[str] = ...) -> None: ...
sentry-python-2.18.0/tests/integrations/grpc/grpc_test_service_pb2_grpc.py
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc

import grpc_test_service_pb2 as grpc__test__service__pb2


class gRPCTestServiceStub(object):
    """Missing associated documentation comment in .proto file."""

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        self.TestServe = channel.unary_unary(
                '/grpc_test_server.gRPCTestService/TestServe',
                request_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
                response_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
                )
        self.TestUnaryStream = channel.unary_stream(
                '/grpc_test_server.gRPCTestService/TestUnaryStream',
                request_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
                response_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
                )
        self.TestStreamStream = channel.stream_stream(
                '/grpc_test_server.gRPCTestService/TestStreamStream',
                request_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
                response_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
                )
        self.TestStreamUnary = channel.stream_unary(
                '/grpc_test_server.gRPCTestService/TestStreamUnary',
                request_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
                response_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
                )


class gRPCTestServiceServicer(object):
    """Missing associated documentation comment in .proto file."""

    def TestServe(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def TestUnaryStream(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def TestStreamStream(self, request_iterator, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def TestStreamUnary(self, request_iterator, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')


def add_gRPCTestServiceServicer_to_server(servicer, server):
    rpc_method_handlers = {
            'TestServe': grpc.unary_unary_rpc_method_handler(
                    servicer.TestServe,
                    request_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
                    response_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
            ),
            'TestUnaryStream': grpc.unary_stream_rpc_method_handler(
                    servicer.TestUnaryStream,
                    request_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
                    response_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
            ),
            'TestStreamStream': grpc.stream_stream_rpc_method_handler(
                    servicer.TestStreamStream,
                    request_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
                    response_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
            ),
            'TestStreamUnary': grpc.stream_unary_rpc_method_handler(
                    servicer.TestStreamUnary,
                    request_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
                    response_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'grpc_test_server.gRPCTestService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))


 # This class is part of an EXPERIMENTAL API.
class gRPCTestService(object):
    """Missing associated documentation comment in .proto file."""

    @staticmethod
    def TestServe(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/grpc_test_server.gRPCTestService/TestServe',
            grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
            grpc__test__service__pb2.gRPCTestMessage.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def TestUnaryStream(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_stream(request, target, '/grpc_test_server.gRPCTestService/TestUnaryStream',
            grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
            grpc__test__service__pb2.gRPCTestMessage.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def TestStreamStream(request_iterator,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.stream_stream(request_iterator, target, '/grpc_test_server.gRPCTestService/TestStreamStream',
            grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
            grpc__test__service__pb2.gRPCTestMessage.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def TestStreamUnary(request_iterator,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.stream_unary(request_iterator, target, '/grpc_test_server.gRPCTestService/TestStreamUnary',
            grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
            grpc__test__service__pb2.gRPCTestMessage.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
sentry-python-2.18.0/tests/integrations/grpc/protos/grpc_test_service.proto
syntax = "proto3";

package grpc_test_server;

service gRPCTestService{
  rpc TestServe(gRPCTestMessage) returns (gRPCTestMessage);
  rpc TestUnaryStream(gRPCTestMessage) returns (stream gRPCTestMessage);
  rpc TestStreamStream(stream gRPCTestMessage) returns (stream gRPCTestMessage);
  rpc TestStreamUnary(stream gRPCTestMessage) returns (gRPCTestMessage);
}

message gRPCTestMessage {
  string text = 1;
}
sentry-python-2.18.0/tests/integrations/grpc/test_grpc.py
import os

import grpc
import pytest

from concurrent import futures
from typing import List, Optional
from unittest.mock import Mock

from sentry_sdk import start_span, start_transaction
from sentry_sdk.consts import OP
from sentry_sdk.integrations.grpc import GRPCIntegration
from tests.conftest import ApproxDict
from tests.integrations.grpc.grpc_test_service_pb2 import gRPCTestMessage
from tests.integrations.grpc.grpc_test_service_pb2_grpc import (
    add_gRPCTestServiceServicer_to_server,
    gRPCTestServiceServicer,
    gRPCTestServiceStub,
)


PORT = 50051
PORT += os.getpid() % 100  # avoid port conflicts when running tests in parallel


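# Start an in-process gRPC server on the test port; callers stop it via _tear_down.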
def _set_up(interceptors: Optional[List[grpc.ServerInterceptor]] = None):
    server = grpc.server(
        futures.ThreadPoolExecutor(max_workers=2),
        interceptors=interceptors,
    )

    add_gRPCTestServiceServicer_to_server(TestService(), server)
    server.add_insecure_port("[::]:{}".format(PORT))
    server.start()

    return server


def _tear_down(server: grpc.Server):
    server.stop(None)


@pytest.mark.forked
def test_grpc_server_starts_transaction(sentry_init, capture_events_forksafe):
    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
    events = capture_events_forksafe()

    server = _set_up()

    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
        stub = gRPCTestServiceStub(channel)
        stub.TestServe(gRPCTestMessage(text="test"))

    _tear_down(server=server)

    events.write_file.close()
    event = events.read_event()
    span = event["spans"][0]

    assert event["type"] == "transaction"
    assert event["transaction_info"] == {
        "source": "custom",
    }
    assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER
    assert span["op"] == "test"


@pytest.mark.forked
def test_grpc_server_other_interceptors(sentry_init, capture_events_forksafe):
    """Ensure compatibility with additional server interceptors."""
    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
    events = capture_events_forksafe()
    mock_intercept = lambda continuation, handler_call_details: continuation(
        handler_call_details
    )
    mock_interceptor = Mock()
    mock_interceptor.intercept_service.side_effect = mock_intercept

    server = _set_up(interceptors=[mock_interceptor])

    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
        stub = gRPCTestServiceStub(channel)
        stub.TestServe(gRPCTestMessage(text="test"))

    _tear_down(server=server)

    mock_interceptor.intercept_service.assert_called_once()

    events.write_file.close()
    event = events.read_event()
    span = event["spans"][0]

    assert event["type"] == "transaction"
    assert event["transaction_info"] == {
        "source": "custom",
    }
    assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER
    assert span["op"] == "test"


@pytest.mark.forked
def test_grpc_server_continues_transaction(sentry_init, capture_events_forksafe):
    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
    events = capture_events_forksafe()

    server = _set_up()

    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
        stub = gRPCTestServiceStub(channel)

        with start_transaction() as transaction:
            metadata = (
                (
                    "baggage",
                    "sentry-trace_id={trace_id},sentry-environment=test,"
                    "sentry-transaction=test-transaction,sentry-sample_rate=1.0".format(
                        trace_id=transaction.trace_id
                    ),
                ),
                (
                    "sentry-trace",
                    "{trace_id}-{parent_span_id}-{sampled}".format(
                        trace_id=transaction.trace_id,
                        parent_span_id=transaction.span_id,
                        sampled=1,
                    ),
                ),
            )
            stub.TestServe(gRPCTestMessage(text="test"), metadata=metadata)

    _tear_down(server=server)

    events.write_file.close()
    event = events.read_event()
    span = event["spans"][0]

    assert event["type"] == "transaction"
    assert event["transaction_info"] == {
        "source": "custom",
    }
    assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER
    assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id
    assert span["op"] == "test"


@pytest.mark.forked
def test_grpc_client_starts_span(sentry_init, capture_events_forksafe):
    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
    events = capture_events_forksafe()

    server = _set_up()

    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
        stub = gRPCTestServiceStub(channel)

        with start_transaction():
            stub.TestServe(gRPCTestMessage(text="test"))

    _tear_down(server=server)

    events.write_file.close()
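    # The first event is the server-side transaction; skip it and assert on
    # the client-side ("local") transaction.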
    events.read_event()
    local_transaction = events.read_event()
    span = local_transaction["spans"][0]

    assert len(local_transaction["spans"]) == 1
    assert span["op"] == OP.GRPC_CLIENT
    assert (
        span["description"]
        == "unary unary call to /grpc_test_server.gRPCTestService/TestServe"
    )
    assert span["data"] == ApproxDict(
        {
            "type": "unary unary",
            "method": "/grpc_test_server.gRPCTestService/TestServe",
            "code": "OK",
        }
    )


@pytest.mark.forked
def test_grpc_client_unary_stream_starts_span(sentry_init, capture_events_forksafe):
    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
    events = capture_events_forksafe()

    server = _set_up()

    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
        stub = gRPCTestServiceStub(channel)

        with start_transaction():
            list(stub.TestUnaryStream(gRPCTestMessage(text="test")))

    _tear_down(server=server)

    events.write_file.close()
    local_transaction = events.read_event()
    span = local_transaction["spans"][0]

    assert len(local_transaction["spans"]) == 1
    assert span["op"] == OP.GRPC_CLIENT
    assert (
        span["description"]
        == "unary stream call to /grpc_test_server.gRPCTestService/TestUnaryStream"
    )
    assert span["data"] == ApproxDict(
        {
            "type": "unary stream",
            "method": "/grpc_test_server.gRPCTestService/TestUnaryStream",
        }
    )


# Using unittest.mock.Mock is not possible here because grpc verifies
# that the interceptor is of the correct type.
class MockClientInterceptor(grpc.UnaryUnaryClientInterceptor):
    call_counter = 0

    def intercept_unary_unary(self, continuation, client_call_details, request):
        self.__class__.call_counter += 1
        return continuation(client_call_details, request)
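
# (call_counter is stored on the class so the test below can assert on
# MockClientInterceptor.call_counter without keeping a reference to the
# instance that was handed to grpc.intercept_channel.)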


@pytest.mark.forked
def test_grpc_client_other_interceptor(sentry_init, capture_events_forksafe):
    """Ensure compatibility with additional client interceptors."""
    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
    events = capture_events_forksafe()

    server = _set_up()

    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
        channel = grpc.intercept_channel(channel, MockClientInterceptor())
        stub = gRPCTestServiceStub(channel)

        with start_transaction():
            stub.TestServe(gRPCTestMessage(text="test"))

    _tear_down(server=server)

    assert MockClientInterceptor.call_counter == 1

    events.write_file.close()
    events.read_event()
    local_transaction = events.read_event()
    span = local_transaction["spans"][0]

    assert len(local_transaction["spans"]) == 1
    assert span["op"] == OP.GRPC_CLIENT
    assert (
        span["description"]
        == "unary unary call to /grpc_test_server.gRPCTestService/TestServe"
    )
    assert span["data"] == ApproxDict(
        {
            "type": "unary unary",
            "method": "/grpc_test_server.gRPCTestService/TestServe",
            "code": "OK",
        }
    )


@pytest.mark.forked
def test_grpc_client_and_servers_interceptors_integration(
    sentry_init, capture_events_forksafe
):
    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
    events = capture_events_forksafe()

    server = _set_up()

    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
        stub = gRPCTestServiceStub(channel)

        with start_transaction():
            stub.TestServe(gRPCTestMessage(text="test"))

    _tear_down(server=server)

    events.write_file.close()
    server_transaction = events.read_event()
    local_transaction = events.read_event()

    assert (
        server_transaction["contexts"]["trace"]["trace_id"]
        == local_transaction["contexts"]["trace"]["trace_id"]
    )


@pytest.mark.forked
def test_stream_stream(sentry_init):
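    """
    Test to verify stream-stream works.
    Tracing not supported for it yet.
    """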
    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
    server = _set_up()

    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
        stub = gRPCTestServiceStub(channel)
        response_iterator = stub.TestStreamStream(iter((gRPCTestMessage(text="test"),)))
        for response in response_iterator:
            assert response.text == "test"

    _tear_down(server=server)


@pytest.mark.forked
def test_stream_unary(sentry_init):
    """
    Test to verify stream-unary works.
    Tracing not supported for it yet.
    """
    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
    server = _set_up()

    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
        stub = gRPCTestServiceStub(channel)
        response = stub.TestStreamUnary(iter((gRPCTestMessage(text="test"),)))
        assert response.text == "test"

    _tear_down(server=server)


@pytest.mark.forked
def test_span_origin(sentry_init, capture_events_forksafe):
    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
    events = capture_events_forksafe()

    server = _set_up()

    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
        stub = gRPCTestServiceStub(channel)

        with start_transaction(name="custom_transaction"):
            stub.TestServe(gRPCTestMessage(text="test"))

    _tear_down(server=server)

    events.write_file.close()

    transaction_from_integration = events.read_event()
    custom_transaction = events.read_event()

    assert (
        transaction_from_integration["contexts"]["trace"]["origin"] == "auto.grpc.grpc"
    )
    assert (
        transaction_from_integration["spans"][0]["origin"]
        == "auto.grpc.grpc.TestService"
    )  # manually created in TestService, not the instrumentation

    assert custom_transaction["contexts"]["trace"]["origin"] == "manual"
    assert custom_transaction["spans"][0]["origin"] == "auto.grpc.grpc"


class TestService(gRPCTestServiceServicer):
    events = []

    @staticmethod
    def TestServe(request, context):  # noqa: N802
        with start_span(
            op="test",
            name="test",
            origin="auto.grpc.grpc.TestService",
        ):
            pass

        return gRPCTestMessage(text=request.text)

    @staticmethod
    def TestUnaryStream(request, context):  # noqa: N802
        for _ in range(3):
            yield gRPCTestMessage(text=request.text)

    @staticmethod
    def TestStreamStream(request, context):  # noqa: N802
        for r in request:
            yield r

    @staticmethod
    def TestStreamUnary(request, context):  # noqa: N802
        requests = [r for r in request]
        return requests.pop()
sentry-python-2.18.0/tests/integrations/grpc/test_grpc_aio.py000066400000000000000000000233261471214654000244560ustar00rootroot00000000000000import asyncio
import os

import grpc
import pytest
import pytest_asyncio
import sentry_sdk

from sentry_sdk import start_span, start_transaction
from sentry_sdk.consts import OP
from sentry_sdk.integrations.grpc import GRPCIntegration
from tests.conftest import ApproxDict
from tests.integrations.grpc.grpc_test_service_pb2 import gRPCTestMessage
from tests.integrations.grpc.grpc_test_service_pb2_grpc import (
    add_gRPCTestServiceServicer_to_server,
    gRPCTestServiceServicer,
    gRPCTestServiceStub,
)

AIO_PORT = 50052
AIO_PORT += os.getpid() % 100  # avoid port conflicts when running tests in parallel


@pytest.fixture(scope="function")
def event_loop(request):
    """Create an instance of the default event loop for each test case."""
    loop = asyncio.new_event_loop()
    yield loop
    loop.close()


@pytest_asyncio.fixture(scope="function")
async def grpc_server(sentry_init, event_loop):
    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
    server = grpc.aio.server()
    server.add_insecure_port("[::]:{}".format(AIO_PORT))
    add_gRPCTestServiceServicer_to_server(TestService, server)

    await event_loop.create_task(server.start())

    try:
        yield server
    finally:
        await server.stop(None)
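
# Note: add_gRPCTestServiceServicer_to_server() above registers the
# TestService class itself rather than an instance; that works here because
# every handler on the aio TestService at the bottom of this file is a
# classmethod.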


@pytest.mark.asyncio
async def test_noop_for_unimplemented_method(event_loop, sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
    server = grpc.aio.server()
    server.add_insecure_port("[::]:{}".format(AIO_PORT))

    await event_loop.create_task(server.start())

    events = capture_events()
    try:
        async with grpc.aio.insecure_channel(
            "localhost:{}".format(AIO_PORT)
        ) as channel:
            stub = gRPCTestServiceStub(channel)
            with pytest.raises(grpc.RpcError) as exc:
                await stub.TestServe(gRPCTestMessage(text="test"))
            assert exc.value.details() == "Method not found!"
    finally:
        await server.stop(None)

    assert not events


@pytest.mark.asyncio
async def test_grpc_server_starts_transaction(grpc_server, capture_events):
    events = capture_events()

    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
        stub = gRPCTestServiceStub(channel)
        await stub.TestServe(gRPCTestMessage(text="test"))

    (event,) = events
    span = event["spans"][0]

    assert event["type"] == "transaction"
    assert event["transaction_info"] == {
        "source": "custom",
    }
    assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER
    assert span["op"] == "test"


@pytest.mark.asyncio
async def test_grpc_server_continues_transaction(grpc_server, capture_events):
    events = capture_events()

    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
        stub = gRPCTestServiceStub(channel)

        with sentry_sdk.start_transaction() as transaction:
            metadata = (
                (
                    "baggage",
                    "sentry-trace_id={trace_id},sentry-environment=test,"
                    "sentry-transaction=test-transaction,sentry-sample_rate=1.0".format(
                        trace_id=transaction.trace_id
                    ),
                ),
                (
                    "sentry-trace",
                    "{trace_id}-{parent_span_id}-{sampled}".format(
                        trace_id=transaction.trace_id,
                        parent_span_id=transaction.span_id,
                        sampled=1,
                    ),
                ),
            )

            await stub.TestServe(gRPCTestMessage(text="test"), metadata=metadata)

    (event, _) = events
    span = event["spans"][0]

    assert event["type"] == "transaction"
    assert event["transaction_info"] == {
        "source": "custom",
    }
    assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER
    assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id
    assert span["op"] == "test"


@pytest.mark.asyncio
async def test_grpc_server_exception(grpc_server, capture_events):
    events = capture_events()

    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
        stub = gRPCTestServiceStub(channel)
        try:
            await stub.TestServe(gRPCTestMessage(text="exception"))
            raise AssertionError()
        except Exception:
            pass

    (event, _) = events

    assert event["exception"]["values"][0]["type"] == "TestService.TestException"
    assert event["exception"]["values"][0]["value"] == "test"
    assert event["exception"]["values"][0]["mechanism"]["handled"] is False
    assert event["exception"]["values"][0]["mechanism"]["type"] == "grpc"


@pytest.mark.asyncio
async def test_grpc_server_abort(grpc_server, capture_events):
    events = capture_events()

    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
        stub = gRPCTestServiceStub(channel)
        try:
            await stub.TestServe(gRPCTestMessage(text="abort"))
            raise AssertionError()
        except Exception:
            pass

    assert len(events) == 1


@pytest.mark.asyncio
async def test_grpc_client_starts_span(grpc_server, capture_events_forksafe):
    events = capture_events_forksafe()

    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
        stub = gRPCTestServiceStub(channel)
        with start_transaction():
            await stub.TestServe(gRPCTestMessage(text="test"))

    events.write_file.close()
    events.read_event()
    local_transaction = events.read_event()
    span = local_transaction["spans"][0]

    assert len(local_transaction["spans"]) == 1
    assert span["op"] == OP.GRPC_CLIENT
    assert (
        span["description"]
        == "unary unary call to /grpc_test_server.gRPCTestService/TestServe"
    )
    assert span["data"] == ApproxDict(
        {
            "type": "unary unary",
            "method": "/grpc_test_server.gRPCTestService/TestServe",
            "code": "OK",
        }
    )


@pytest.mark.asyncio
async def test_grpc_client_unary_stream_starts_span(
    grpc_server, capture_events_forksafe
):
    events = capture_events_forksafe()

    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
        stub = gRPCTestServiceStub(channel)
        with start_transaction():
            response = stub.TestUnaryStream(gRPCTestMessage(text="test"))
            [_ async for _ in response]

    events.write_file.close()
    local_transaction = events.read_event()
    span = local_transaction["spans"][0]

    assert len(local_transaction["spans"]) == 1
    assert span["op"] == OP.GRPC_CLIENT
    assert (
        span["description"]
        == "unary stream call to /grpc_test_server.gRPCTestService/TestUnaryStream"
    )
    assert span["data"] == ApproxDict(
        {
            "type": "unary stream",
            "method": "/grpc_test_server.gRPCTestService/TestUnaryStream",
        }
    )


@pytest.mark.asyncio
async def test_stream_stream(grpc_server):
    """
    Test to verify stream-stream works.
    Tracing not supported for it yet.
    """
    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
        stub = gRPCTestServiceStub(channel)
        response = stub.TestStreamStream((gRPCTestMessage(text="test"),))
        async for r in response:
            assert r.text == "test"


@pytest.mark.asyncio
async def test_stream_unary(grpc_server):
    """
    Test to verify stream-unary works.
    Tracing not supported for it yet.
    """
    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
        stub = gRPCTestServiceStub(channel)
        response = await stub.TestStreamUnary((gRPCTestMessage(text="test"),))
        assert response.text == "test"


@pytest.mark.asyncio
async def test_span_origin(grpc_server, capture_events_forksafe):
    events = capture_events_forksafe()

    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
        stub = gRPCTestServiceStub(channel)
        with start_transaction(name="custom_transaction"):
            await stub.TestServe(gRPCTestMessage(text="test"))

    events.write_file.close()

    transaction_from_integration = events.read_event()
    custom_transaction = events.read_event()

    assert (
        transaction_from_integration["contexts"]["trace"]["origin"] == "auto.grpc.grpc"
    )
    assert (
        transaction_from_integration["spans"][0]["origin"]
        == "auto.grpc.grpc.TestService.aio"
    )  # manually created in TestService, not the instrumentation

    assert custom_transaction["contexts"]["trace"]["origin"] == "manual"
    assert custom_transaction["spans"][0]["origin"] == "auto.grpc.grpc"


class TestService(gRPCTestServiceServicer):
    class TestException(Exception):
        __test__ = False

        def __init__(self):
            super().__init__("test")

    @classmethod
    async def TestServe(cls, request, context):  # noqa: N802
        with start_span(
            op="test",
            name="test",
            origin="auto.grpc.grpc.TestService.aio",
        ):
            pass

        if request.text == "exception":
            raise cls.TestException()

        if request.text == "abort":
            await context.abort(grpc.StatusCode.ABORTED)

        return gRPCTestMessage(text=request.text)

    @classmethod
    async def TestUnaryStream(cls, request, context):  # noqa: N802
        for _ in range(3):
            yield gRPCTestMessage(text=request.text)

    @classmethod
    async def TestStreamStream(cls, request, context):  # noqa: N802
        async for r in request:
            yield r

    @classmethod
    async def TestStreamUnary(cls, request, context):  # noqa: N802
        requests = [r async for r in request]
        return requests.pop()
sentry-python-2.18.0/tests/integrations/httpx/000077500000000000000000000000001471214654000214705ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/httpx/__init__.py000066400000000000000000000000541471214654000236000ustar00rootroot00000000000000import pytest

pytest.importorskip("httpx")
sentry-python-2.18.0/tests/integrations/httpx/test_httpx.py000066400000000000000000000237161471214654000242610ustar00rootroot00000000000000import asyncio
from unittest import mock

import httpx
import pytest
import responses

import sentry_sdk
from sentry_sdk import capture_message, start_transaction
from sentry_sdk.consts import MATCH_ALL, SPANDATA
from sentry_sdk.integrations.httpx import HttpxIntegration
from tests.conftest import ApproxDict


@pytest.mark.parametrize(
    "httpx_client",
    (httpx.Client(), httpx.AsyncClient()),
)
def test_crumb_capture_and_hint(sentry_init, capture_events, httpx_client):
    def before_breadcrumb(crumb, hint):
        crumb["data"]["extra"] = "foo"
        return crumb

    sentry_init(integrations=[HttpxIntegration()], before_breadcrumb=before_breadcrumb)

    url = "http://example.com/"
    responses.add(responses.GET, url, status=200)

    with start_transaction():
        events = capture_events()

        if asyncio.iscoroutinefunction(httpx_client.get):
            response = asyncio.get_event_loop().run_until_complete(
                httpx_client.get(url)
            )
        else:
            response = httpx_client.get(url)

        assert response.status_code == 200
        capture_message("Testing!")

        (event,) = events

        crumb = event["breadcrumbs"]["values"][0]
        assert crumb["type"] == "http"
        assert crumb["category"] == "httplib"
        assert crumb["data"] == ApproxDict(
            {
                "url": url,
                SPANDATA.HTTP_METHOD: "GET",
                SPANDATA.HTTP_FRAGMENT: "",
                SPANDATA.HTTP_QUERY: "",
                SPANDATA.HTTP_STATUS_CODE: 200,
                "reason": "OK",
                "extra": "foo",
            }
        )


@pytest.mark.parametrize(
    "httpx_client",
    (httpx.Client(), httpx.AsyncClient()),
)
def test_outgoing_trace_headers(sentry_init, httpx_client):
    sentry_init(traces_sample_rate=1.0, integrations=[HttpxIntegration()])

    url = "http://example.com/"
    responses.add(responses.GET, url, status=200)

    with start_transaction(
        name="/interactions/other-dogs/new-dog",
        op="greeting.sniff",
        trace_id="01234567890123456789012345678901",
    ) as transaction:
        if asyncio.iscoroutinefunction(httpx_client.get):
            response = asyncio.get_event_loop().run_until_complete(
                httpx_client.get(url)
            )
        else:
            response = httpx_client.get(url)

        request_span = transaction._span_recorder.spans[-1]
        assert response.request.headers[
            "sentry-trace"
        ] == "{trace_id}-{parent_span_id}-{sampled}".format(
            trace_id=transaction.trace_id,
            parent_span_id=request_span.span_id,
            sampled=1,
        )


@pytest.mark.parametrize(
    "httpx_client",
    (httpx.Client(), httpx.AsyncClient()),
)
def test_outgoing_trace_headers_append_to_baggage(sentry_init, httpx_client):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[HttpxIntegration()],
        release="d08ebdb9309e1b004c6f52202de58a09c2268e42",
    )

    url = "http://example.com/"
    responses.add(responses.GET, url, status=200)

    with start_transaction(
        name="/interactions/other-dogs/new-dog",
        op="greeting.sniff",
        trace_id="01234567890123456789012345678901",
    ) as transaction:
        if asyncio.iscoroutinefunction(httpx_client.get):
            response = asyncio.get_event_loop().run_until_complete(
                httpx_client.get(url, headers={"baGGage": "custom=data"})
            )
        else:
            response = httpx_client.get(url, headers={"baGGage": "custom=data"})

        request_span = transaction._span_recorder.spans[-1]
        assert response.request.headers[
            "sentry-trace"
        ] == "{trace_id}-{parent_span_id}-{sampled}".format(
            trace_id=transaction.trace_id,
            parent_span_id=request_span.span_id,
            sampled=1,
        )
        assert (
            response.request.headers["baggage"]
            == "custom=data,sentry-trace_id=01234567890123456789012345678901,sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true"
        )


@pytest.mark.parametrize(
    "httpx_client,trace_propagation_targets,url,trace_propagated",
    [
        [
            httpx.Client(),
            None,
            "https://example.com/",
            False,
        ],
        [
            httpx.Client(),
            [],
            "https://example.com/",
            False,
        ],
        [
            httpx.Client(),
            [MATCH_ALL],
            "https://example.com/",
            True,
        ],
        [
            httpx.Client(),
            ["https://example.com/"],
            "https://example.com/",
            True,
        ],
        [
            httpx.Client(),
            ["https://example.com/"],
            "https://example.com",
            False,
        ],
        [
            httpx.Client(),
            ["https://example.com"],
            "https://example.com",
            True,
        ],
        [
            httpx.Client(),
            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
            "https://example.net",
            False,
        ],
        [
            httpx.Client(),
            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
            "https://good.example.net",
            True,
        ],
        [
            httpx.Client(),
            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
            "https://good.example.net/some/thing",
            True,
        ],
        [
            httpx.AsyncClient(),
            None,
            "https://example.com/",
            False,
        ],
        [
            httpx.AsyncClient(),
            [],
            "https://example.com/",
            False,
        ],
        [
            httpx.AsyncClient(),
            [MATCH_ALL],
            "https://example.com/",
            True,
        ],
        [
            httpx.AsyncClient(),
            ["https://example.com/"],
            "https://example.com/",
            True,
        ],
        [
            httpx.AsyncClient(),
            ["https://example.com/"],
            "https://example.com",
            False,
        ],
        [
            httpx.AsyncClient(),
            ["https://example.com"],
            "https://example.com",
            True,
        ],
        [
            httpx.AsyncClient(),
            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
            "https://example.net",
            False,
        ],
        [
            httpx.AsyncClient(),
            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
            "https://good.example.net",
            True,
        ],
        [
            httpx.AsyncClient(),
            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
            "https://good.example.net/some/thing",
            True,
        ],
    ],
)
def test_option_trace_propagation_targets(
    sentry_init,
    httpx_client,
    httpx_mock,  # this comes from pytest-httpx
    trace_propagation_targets,
    url,
    trace_propagated,
):
    httpx_mock.add_response()

    sentry_init(
        release="test",
        trace_propagation_targets=trace_propagation_targets,
        traces_sample_rate=1.0,
        integrations=[HttpxIntegration()],
    )

    with sentry_sdk.start_transaction():  # Must be in a transaction to propagate headers
        if asyncio.iscoroutinefunction(httpx_client.get):
            asyncio.get_event_loop().run_until_complete(httpx_client.get(url))
        else:
            httpx_client.get(url)

    request_headers = httpx_mock.get_request().headers

    if trace_propagated:
        assert "sentry-trace" in request_headers
    else:
        assert "sentry-trace" not in request_headers


def test_do_not_propagate_outside_transaction(sentry_init, httpx_mock):
    httpx_mock.add_response()

    sentry_init(
        traces_sample_rate=1.0,
        trace_propagation_targets=[MATCH_ALL],
        integrations=[HttpxIntegration()],
    )

    httpx_client = httpx.Client()
    httpx_client.get("http://example.com/")

    request_headers = httpx_mock.get_request().headers
    assert "sentry-trace" not in request_headers


@pytest.mark.tests_internal_exceptions
def test_omit_url_data_if_parsing_fails(sentry_init, capture_events):
    sentry_init(integrations=[HttpxIntegration()])

    httpx_client = httpx.Client()
    url = "http://example.com"
    responses.add(responses.GET, url, status=200)

    events = capture_events()
    with mock.patch(
        "sentry_sdk.integrations.httpx.parse_url",
        side_effect=ValueError,
    ):
        response = httpx_client.get(url)

    assert response.status_code == 200
    capture_message("Testing!")

    (event,) = events
    assert event["breadcrumbs"]["values"][0]["data"] == ApproxDict(
        {
            SPANDATA.HTTP_METHOD: "GET",
            SPANDATA.HTTP_STATUS_CODE: 200,
            "reason": "OK",
            # no url related data
        }
    )

    assert "url" not in event["breadcrumbs"]["values"][0]["data"]
    assert SPANDATA.HTTP_FRAGMENT not in event["breadcrumbs"]["values"][0]["data"]
    assert SPANDATA.HTTP_QUERY not in event["breadcrumbs"]["values"][0]["data"]


@pytest.mark.parametrize(
    "httpx_client",
    (httpx.Client(), httpx.AsyncClient()),
)
def test_span_origin(sentry_init, capture_events, httpx_client):
    sentry_init(
        integrations=[HttpxIntegration()],
        traces_sample_rate=1.0,
    )

    events = capture_events()

    url = "http://example.com/"
    responses.add(responses.GET, url, status=200)

    with start_transaction(name="test_transaction"):
        if asyncio.iscoroutinefunction(httpx_client.get):
            asyncio.get_event_loop().run_until_complete(httpx_client.get(url))
        else:
            httpx_client.get(url)

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"
    assert event["spans"][0]["origin"] == "auto.http.httpx"
sentry-python-2.18.0/tests/integrations/huey/000077500000000000000000000000001471214654000212735ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/huey/__init__.py000066400000000000000000000000531471214654000234020ustar00rootroot00000000000000import pytest

pytest.importorskip("huey")
sentry-python-2.18.0/tests/integrations/huey/test_huey.py000066400000000000000000000131771471214654000236670ustar00rootroot00000000000000import pytest
from decimal import DivisionByZero

from sentry_sdk import start_transaction
from sentry_sdk.integrations.huey import HueyIntegration
from sentry_sdk.utils import parse_version

from huey import __version__ as HUEY_VERSION
from huey.api import MemoryHuey, Result
from huey.exceptions import RetryTask


HUEY_VERSION = parse_version(HUEY_VERSION)


@pytest.fixture
def init_huey(sentry_init):
    def inner():
        sentry_init(
            integrations=[HueyIntegration()],
            traces_sample_rate=1.0,
            send_default_pii=True,
        )

        return MemoryHuey(name="sentry_sdk")

    return inner


@pytest.fixture(autouse=True)
def flush_huey_tasks(init_huey):
    huey = init_huey()
    huey.flush()


def execute_huey_task(huey, func, *args, **kwargs):
    exceptions = kwargs.pop("exceptions", None)
    result = func(*args, **kwargs)
    task = huey.dequeue()
    if exceptions is not None:
        try:
            huey.execute(task)
        except exceptions:
            pass
    else:
        huey.execute(task)
    return result
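
# execute_huey_task() emulates a huey consumer in-process: calling the task
# function enqueues it on the MemoryHuey instance and returns a Result handle,
# huey.dequeue() pops the pending message, and huey.execute() runs it
# synchronously, so no worker process is needed in these tests.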


def test_task_result(init_huey):
    huey = init_huey()

    @huey.task()
    def increase(num):
        return num + 1

    result = increase(3)

    assert isinstance(result, Result)
    assert len(huey) == 1
    task = huey.dequeue()
    assert huey.execute(task) == 4
    assert result.get() == 4


@pytest.mark.parametrize("task_fails", [True, False], ids=["error", "success"])
def test_task_transaction(capture_events, init_huey, task_fails):
    huey = init_huey()

    @huey.task()
    def division(a, b):
        return a / b

    events = capture_events()
    execute_huey_task(
        huey, division, 1, int(not task_fails), exceptions=(DivisionByZero,)
    )

    if task_fails:
        error_event = events.pop(0)
        assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
        assert error_event["exception"]["values"][0]["mechanism"]["type"] == "huey"

    (event,) = events
    assert event["type"] == "transaction"
    assert event["transaction"] == "division"
    assert event["transaction_info"] == {"source": "task"}

    if task_fails:
        assert event["contexts"]["trace"]["status"] == "internal_error"
    else:
        assert event["contexts"]["trace"]["status"] == "ok"

    assert "huey_task_id" in event["tags"]
    assert "huey_task_retry" in event["tags"]


def test_task_retry(capture_events, init_huey):
    huey = init_huey()
    context = {"retry": True}

    @huey.task()
    def retry_task(context):
        if context["retry"]:
            context["retry"] = False
            raise RetryTask()

    events = capture_events()
    result = execute_huey_task(huey, retry_task, context)
    (event,) = events

    assert event["transaction"] == "retry_task"
    assert event["tags"]["huey_task_id"] == result.task.id
    assert len(huey) == 1

    task = huey.dequeue()
    huey.execute(task)
    (event, _) = events

    assert event["transaction"] == "retry_task"
    assert event["tags"]["huey_task_id"] == result.task.id
    assert len(huey) == 0


@pytest.mark.parametrize("lock_name", ["lock.a", "lock.b"], ids=["locked", "unlocked"])
@pytest.mark.skipif(HUEY_VERSION < (2, 5), reason="is_locked was added in 2.5")
def test_task_lock(capture_events, init_huey, lock_name):
    huey = init_huey()

    task_lock_name = "lock.a"
    should_be_locked = task_lock_name == lock_name

    @huey.task()
    @huey.lock_task(task_lock_name)
    def maybe_locked_task():
        pass

    events = capture_events()

    with huey.lock_task(lock_name):
        assert huey.is_locked(task_lock_name) == should_be_locked
        result = execute_huey_task(huey, maybe_locked_task)

    (event,) = events

    assert event["transaction"] == "maybe_locked_task"
    assert event["tags"]["huey_task_id"] == result.task.id
    assert event["contexts"]["trace"]["status"] == (
        "aborted" if should_be_locked else "ok"
    )
    assert len(huey) == 0


def test_huey_enqueue(init_huey, capture_events):
    huey = init_huey()

    @huey.task(name="different_task_name")
    def dummy_task():
        pass

    events = capture_events()

    with start_transaction() as transaction:
        dummy_task()

    (event,) = events

    assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id
    assert event["contexts"]["trace"]["span_id"] == transaction.span_id

    assert len(event["spans"])
    assert event["spans"][0]["op"] == "queue.submit.huey"
    assert event["spans"][0]["description"] == "different_task_name"


def test_huey_propagate_trace(init_huey, capture_events):
    huey = init_huey()

    events = capture_events()

    @huey.task()
    def propagated_trace_task():
        pass

    with start_transaction() as outer_transaction:
        execute_huey_task(huey, propagated_trace_task)

    assert (
        events[0]["transaction"] == "propagated_trace_task"
    )  # the "inner" transaction
    assert events[0]["contexts"]["trace"]["trace_id"] == outer_transaction.trace_id


def test_span_origin_producer(init_huey, capture_events):
    huey = init_huey()

    @huey.task(name="different_task_name")
    def dummy_task():
        pass

    events = capture_events()

    with start_transaction():
        dummy_task()

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"
    assert event["spans"][0]["origin"] == "auto.queue.huey"


def test_span_origin_consumer(init_huey, capture_events):
    huey = init_huey()

    events = capture_events()

    @huey.task()
    def propagated_trace_task():
        pass

    execute_huey_task(huey, propagated_trace_task)

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "auto.queue.huey"
sentry-python-2.18.0/tests/integrations/huggingface_hub/000077500000000000000000000000001471214654000234265ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/huggingface_hub/__init__.py000066400000000000000000000000661471214654000255410ustar00rootroot00000000000000import pytest

pytest.importorskip("huggingface_hub")
sentry-python-2.18.0/tests/integrations/huggingface_hub/test_huggingface_hub.py000066400000000000000000000122641471214654000301510ustar00rootroot00000000000000import itertools

import pytest
from huggingface_hub import (
    InferenceClient,
)
from huggingface_hub.errors import OverloadedError

from sentry_sdk import start_transaction
from sentry_sdk.integrations.huggingface_hub import HuggingfaceHubIntegration

from unittest import mock  # python 3.3 and above


@pytest.mark.parametrize(
    "send_default_pii, include_prompts, details_arg",
    itertools.product([True, False], repeat=3),
)
def test_nonstreaming_chat_completion(
    sentry_init, capture_events, send_default_pii, include_prompts, details_arg
):
    sentry_init(
        integrations=[HuggingfaceHubIntegration(include_prompts=include_prompts)],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    client = InferenceClient("some-model")
    if details_arg:
        client.post = mock.Mock(
            return_value=b"""[{
                "generated_text": "the model response",
                "details": {
                    "finish_reason": "length",
                    "generated_tokens": 10,
                    "prefill": [],
                    "tokens": []
                }
            }]"""
        )
    else:
        client.post = mock.Mock(
            return_value=b'[{"generated_text": "the model response"}]'
        )
    with start_transaction(name="huggingface_hub tx"):
        response = client.text_generation(
            prompt="hello",
            details=details_arg,
            stream=False,
        )
    if details_arg:
        assert response.generated_text == "the model response"
    else:
        assert response == "the model response"
    tx = events[0]
    assert tx["type"] == "transaction"
    span = tx["spans"][0]
    assert span["op"] == "ai.chat_completions.create.huggingface_hub"

    if send_default_pii and include_prompts:
        assert "hello" in span["data"]["ai.input_messages"]
        assert "the model response" in span["data"]["ai.responses"]
    else:
        assert "ai.input_messages" not in span["data"]
        assert "ai.responses" not in span["data"]

    if details_arg:
        assert span["measurements"]["ai_total_tokens_used"]["value"] == 10


@pytest.mark.parametrize(
    "send_default_pii, include_prompts, details_arg",
    itertools.product([True, False], repeat=3),
)
def test_streaming_chat_completion(
    sentry_init, capture_events, send_default_pii, include_prompts, details_arg
):
    sentry_init(
        integrations=[HuggingfaceHubIntegration(include_prompts=include_prompts)],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    client = InferenceClient("some-model")
    client.post = mock.Mock(
        return_value=[
            b"""data:{
                "token":{"id":1, "special": false, "text": "the model "}
            }""",
            b"""data:{
                "token":{"id":2, "special": false, "text": "response"},
                "details":{"finish_reason": "length", "generated_tokens": 10, "seed": 0}
            }""",
        ]
    )
    with start_transaction(name="huggingface_hub tx"):
        response = list(
            client.text_generation(
                prompt="hello",
                details=details_arg,
                stream=True,
            )
        )
    assert len(response) == 2
    if details_arg:
        assert response[0].token.text + response[1].token.text == "the model response"
    else:
        assert response[0] + response[1] == "the model response"

    tx = events[0]
    assert tx["type"] == "transaction"
    span = tx["spans"][0]
    assert span["op"] == "ai.chat_completions.create.huggingface_hub"

    if send_default_pii and include_prompts:
        assert "hello" in span["data"]["ai.input_messages"]
        assert "the model response" in span["data"]["ai.responses"]
    else:
        assert "ai.input_messages" not in span["data"]
        assert "ai.responses" not in span["data"]

    if details_arg:
        assert span["measurements"]["ai_total_tokens_used"]["value"] == 10


def test_bad_chat_completion(sentry_init, capture_events):
    sentry_init(integrations=[HuggingfaceHubIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    client = InferenceClient("some-model")
    client.post = mock.Mock(side_effect=OverloadedError("The server is overloaded"))
    with pytest.raises(OverloadedError):
        client.text_generation(prompt="hello")

    (event,) = events
    assert event["level"] == "error"


def test_span_origin(sentry_init, capture_events):
    sentry_init(
        integrations=[HuggingfaceHubIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client = InferenceClient("some-model")
    client.post = mock.Mock(
        return_value=[
            b"""data:{
                "token":{"id":1, "special": false, "text": "the model "}
            }""",
        ]
    )
    with start_transaction(name="huggingface_hub tx"):
        list(
            client.text_generation(
                prompt="hello",
                stream=True,
            )
        )

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"
    assert event["spans"][0]["origin"] == "auto.ai.huggingface_hub"
sentry-python-2.18.0/tests/integrations/langchain/000077500000000000000000000000001471214654000222455ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/langchain/__init__.py000066400000000000000000000000651471214654000243570ustar00rootroot00000000000000import pytest

pytest.importorskip("langchain_core")
sentry-python-2.18.0/tests/integrations/langchain/test_langchain.py000066400000000000000000000271671471214654000256170ustar00rootroot00000000000000from typing import List, Optional, Any, Iterator
from unittest.mock import Mock

import pytest

try:
    # Langchain >= 0.2
    from langchain_openai import ChatOpenAI
except ImportError:
    # Langchain < 0.2
    from langchain_community.chat_models import ChatOpenAI

from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.messages import BaseMessage, AIMessageChunk
from langchain_core.outputs import ChatGenerationChunk

from sentry_sdk import start_transaction
from sentry_sdk.integrations.langchain import LangchainIntegration
from langchain.agents import tool, AgentExecutor, create_openai_tools_agent
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder


@tool
def get_word_length(word: str) -> int:
    """Returns the length of a word."""
    return len(word)


stream_result_mock: Mock
llm_type: str


class MockOpenAI(ChatOpenAI):
    def _stream(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[ChatGenerationChunk]:
        for x in stream_result_mock():
            yield x

    @property
    def _llm_type(self) -> str:
        return llm_type


def tiktoken_encoding_if_installed():
    try:
        import tiktoken  # type: ignore # noqa # pylint: disable=unused-import

        return "cl100k_base"
    except ImportError:
        return None


@pytest.mark.parametrize(
    "send_default_pii, include_prompts, use_unknown_llm_type",
    [
        (True, True, False),
        (True, False, False),
        (False, True, False),
        (False, False, True),
    ],
)
def test_langchain_agent(
    sentry_init, capture_events, send_default_pii, include_prompts, use_unknown_llm_type
):
    global llm_type
    llm_type = "acme-llm" if use_unknown_llm_type else "openai-chat"

    sentry_init(
        integrations=[
            LangchainIntegration(
                include_prompts=include_prompts,
                tiktoken_encoding_name=tiktoken_encoding_if_installed(),
            )
        ],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    prompt = ChatPromptTemplate.from_messages(
        [
            (
                "system",
                "You are very powerful assistant, but don't know current events",
            ),
            ("user", "{input}"),
            MessagesPlaceholder(variable_name="agent_scratchpad"),
        ]
    )
    global stream_result_mock
    stream_result_mock = Mock(
        side_effect=[
            [
                ChatGenerationChunk(
                    type="ChatGenerationChunk",
                    message=AIMessageChunk(
                        content="",
                        additional_kwargs={
                            "tool_calls": [
                                {
                                    "index": 0,
                                    "id": "call_BbeyNhCKa6kYLYzrD40NGm3b",
                                    "function": {
                                        "arguments": "",
                                        "name": "get_word_length",
                                    },
                                    "type": "function",
                                }
                            ]
                        },
                    ),
                ),
                ChatGenerationChunk(
                    type="ChatGenerationChunk",
                    message=AIMessageChunk(
                        content="",
                        additional_kwargs={
                            "tool_calls": [
                                {
                                    "index": 0,
                                    "id": None,
                                    "function": {
                                        "arguments": '{"word": "eudca"}',
                                        "name": None,
                                    },
                                    "type": None,
                                }
                            ]
                        },
                    ),
                ),
                ChatGenerationChunk(
                    type="ChatGenerationChunk",
                    message=AIMessageChunk(content="5"),
                    generation_info={"finish_reason": "function_call"},
                ),
            ],
            [
                ChatGenerationChunk(
                    text="The word eudca has 5 letters.",
                    type="ChatGenerationChunk",
                    message=AIMessageChunk(content="The word eudca has 5 letters."),
                ),
                ChatGenerationChunk(
                    type="ChatGenerationChunk",
                    generation_info={"finish_reason": "stop"},
                    message=AIMessageChunk(content=""),
                ),
            ],
        ]
    )
    llm = MockOpenAI(
        model_name="gpt-3.5-turbo",
        temperature=0,
        openai_api_key="badkey",
    )
    agent = create_openai_tools_agent(llm, [get_word_length], prompt)

    agent_executor = AgentExecutor(agent=agent, tools=[get_word_length], verbose=True)

    with start_transaction():
        list(agent_executor.stream({"input": "How many letters in the word eudca"}))

    tx = events[0]
    assert tx["type"] == "transaction"
    chat_spans = list(
        x for x in tx["spans"] if x["op"] == "ai.chat_completions.create.langchain"
    )
    tool_exec_span = next(x for x in tx["spans"] if x["op"] == "ai.tool.langchain")

    assert len(chat_spans) == 2

    # We can't guarantee anything about the "shape" of the langchain execution graph
    assert len(list(x for x in tx["spans"] if x["op"] == "ai.run.langchain")) > 0

    if use_unknown_llm_type:
        assert "ai_prompt_tokens_used" in chat_spans[0]["measurements"]
        assert "ai_total_tokens_used" in chat_spans[0]["measurements"]
    else:
        # important: to avoid double counting, we do *not* measure
        # tokens used if we have an explicit integration (e.g. OpenAI)
        assert "measurements" not in chat_spans[0]

    if send_default_pii and include_prompts:
        assert (
            "You are very powerful"
            in chat_spans[0]["data"]["ai.input_messages"][0]["content"]
        )
        assert "5" in chat_spans[0]["data"]["ai.responses"]
        assert "word" in tool_exec_span["data"]["ai.input_messages"]
        assert 5 == int(tool_exec_span["data"]["ai.responses"])
        assert (
            "You are very powerful"
            in chat_spans[1]["data"]["ai.input_messages"][0]["content"]
        )
        assert "5" in chat_spans[1]["data"]["ai.responses"]
    else:
        assert "ai.input_messages" not in chat_spans[0].get("data", {})
        assert "ai.responses" not in chat_spans[0].get("data", {})
        assert "ai.input_messages" not in chat_spans[1].get("data", {})
        assert "ai.responses" not in chat_spans[1].get("data", {})
        assert "ai.input_messages" not in tool_exec_span.get("data", {})
        assert "ai.responses" not in tool_exec_span.get("data", {})


def test_langchain_error(sentry_init, capture_events):
    sentry_init(
        integrations=[LangchainIntegration(include_prompts=True)],
        traces_sample_rate=1.0,
        send_default_pii=True,
    )
    events = capture_events()

    prompt = ChatPromptTemplate.from_messages(
        [
            (
                "system",
                "You are very powerful assistant, but don't know current events",
            ),
            ("user", "{input}"),
            MessagesPlaceholder(variable_name="agent_scratchpad"),
        ]
    )
    global stream_result_mock
    stream_result_mock = Mock(side_effect=Exception("API rate limit error"))
    llm = MockOpenAI(
        model_name="gpt-3.5-turbo",
        temperature=0,
        openai_api_key="badkey",
    )
    agent = create_openai_tools_agent(llm, [get_word_length], prompt)

    agent_executor = AgentExecutor(agent=agent, tools=[get_word_length], verbose=True)

    with start_transaction(), pytest.raises(Exception):
        list(agent_executor.stream({"input": "How many letters in the word eudca"}))

    error = events[0]
    assert error["level"] == "error"


def test_span_origin(sentry_init, capture_events):
    sentry_init(
        integrations=[LangchainIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    prompt = ChatPromptTemplate.from_messages(
        [
            (
                "system",
                "You are very powerful assistant, but don't know current events",
            ),
            ("user", "{input}"),
            MessagesPlaceholder(variable_name="agent_scratchpad"),
        ]
    )
    global stream_result_mock
    stream_result_mock = Mock(
        side_effect=[
            [
                ChatGenerationChunk(
                    type="ChatGenerationChunk",
                    message=AIMessageChunk(
                        content="",
                        additional_kwargs={
                            "tool_calls": [
                                {
                                    "index": 0,
                                    "id": "call_BbeyNhCKa6kYLYzrD40NGm3b",
                                    "function": {
                                        "arguments": "",
                                        "name": "get_word_length",
                                    },
                                    "type": "function",
                                }
                            ]
                        },
                    ),
                ),
                ChatGenerationChunk(
                    type="ChatGenerationChunk",
                    message=AIMessageChunk(
                        content="",
                        additional_kwargs={
                            "tool_calls": [
                                {
                                    "index": 0,
                                    "id": None,
                                    "function": {
                                        "arguments": '{"word": "eudca"}',
                                        "name": None,
                                    },
                                    "type": None,
                                }
                            ]
                        },
                    ),
                ),
                ChatGenerationChunk(
                    type="ChatGenerationChunk",
                    message=AIMessageChunk(content="5"),
                    generation_info={"finish_reason": "function_call"},
                ),
            ],
            [
                ChatGenerationChunk(
                    text="The word eudca has 5 letters.",
                    type="ChatGenerationChunk",
                    message=AIMessageChunk(content="The word eudca has 5 letters."),
                ),
                ChatGenerationChunk(
                    type="ChatGenerationChunk",
                    generation_info={"finish_reason": "stop"},
                    message=AIMessageChunk(content=""),
                ),
            ],
        ]
    )
    llm = MockOpenAI(
        model_name="gpt-3.5-turbo",
        temperature=0,
        openai_api_key="badkey",
    )
    agent = create_openai_tools_agent(llm, [get_word_length], prompt)

    agent_executor = AgentExecutor(agent=agent, tools=[get_word_length], verbose=True)

    with start_transaction():
        list(agent_executor.stream({"input": "How many letters in the word eudca"}))

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"
    for span in event["spans"]:
        assert span["origin"] == "auto.ai.langchain"
sentry-python-2.18.0/tests/integrations/launchdarkly/000077500000000000000000000000001471214654000230025ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/launchdarkly/__init__.py000066400000000000000000000000571471214654000251150ustar00rootroot00000000000000import pytest

pytest.importorskip("ldclient")
sentry-python-2.18.0/tests/integrations/launchdarkly/test_launchdarkly.py000066400000000000000000000101131471214654000270700ustar00rootroot00000000000000import asyncio
import concurrent.futures as cf

import ldclient

import sentry_sdk
import pytest

from ldclient import LDClient
from ldclient.config import Config
from ldclient.context import Context
from ldclient.integrations.test_data import TestData

from sentry_sdk.integrations import DidNotEnable
from sentry_sdk.integrations.launchdarkly import LaunchDarklyIntegration


@pytest.mark.parametrize(
    "use_global_client",
    (False, True),
)
def test_launchdarkly_integration(sentry_init, use_global_client):
    td = TestData.data_source()
    config = Config("sdk-key", update_processor_class=td)
    if use_global_client:
        ldclient.set_config(config)
        sentry_init(integrations=[LaunchDarklyIntegration()])
        client = ldclient.get()
    else:
        client = LDClient(config=config)
        sentry_init(integrations=[LaunchDarklyIntegration(ld_client=client)])

    # Set test values
    td.update(td.flag("hello").variation_for_all(True))
    td.update(td.flag("world").variation_for_all(True))

    # Evaluate
    client.variation("hello", Context.create("my-org", "organization"), False)
    client.variation("world", Context.create("user1", "user"), False)
    client.variation("other", Context.create("user2", "user"), False)

    assert sentry_sdk.get_current_scope().flags.get() == [
        {"flag": "hello", "result": True},
        {"flag": "world", "result": True},
        {"flag": "other", "result": False},
    ]


def test_launchdarkly_integration_threaded(sentry_init):
    td = TestData.data_source()
    client = LDClient(config=Config("sdk-key", update_processor_class=td))
    sentry_init(integrations=[LaunchDarklyIntegration(ld_client=client)])
    context = Context.create("user1")

    def task(flag_key):
        # Creates a new isolation scope for the thread.
        # This means the evaluations in each task are captured separately.
        with sentry_sdk.isolation_scope():
            client.variation(flag_key, context, False)
            return [f["flag"] for f in sentry_sdk.get_current_scope().flags.get()]

    td.update(td.flag("hello").variation_for_all(True))
    td.update(td.flag("world").variation_for_all(False))
    # Capture an eval before we split isolation scopes.
    client.variation("hello", context, False)

    with cf.ThreadPoolExecutor(max_workers=2) as pool:
        results = list(pool.map(task, ["world", "other"]))

    assert results[0] == ["hello", "world"]
    assert results[1] == ["hello", "other"]


def test_launchdarkly_integration_asyncio(sentry_init):
    """Assert concurrently evaluated flags do not pollute one another."""
    td = TestData.data_source()
    client = LDClient(config=Config("sdk-key", update_processor_class=td))
    sentry_init(integrations=[LaunchDarklyIntegration(ld_client=client)])
    context = Context.create("user1")

    async def task(flag_key):
        with sentry_sdk.isolation_scope():
            client.variation(flag_key, context, False)
            return [f["flag"] for f in sentry_sdk.get_current_scope().flags.get()]

    async def runner():
        return await asyncio.gather(task("world"), task("other"))

    td.update(td.flag("hello").variation_for_all(True))
    td.update(td.flag("world").variation_for_all(False))
    client.variation("hello", context, False)

    results = asyncio.run(runner())
    assert results[0] == ["hello", "world"]
    assert results[1] == ["hello", "other"]


def test_launchdarkly_integration_did_not_enable(monkeypatch):
    # Client is not passed in and set_config wasn't called.
    # TODO: Bad practice to access internals like this. We can skip this test, or remove this
    #  case entirely (force user to pass in a client instance).
    ldclient._reset_client()
    try:
        ldclient.__lock.lock()
        ldclient.__config = None
    finally:
        ldclient.__lock.unlock()

    with pytest.raises(DidNotEnable):
        LaunchDarklyIntegration()

    # Client not initialized.
    client = LDClient(config=Config("sdk-key"))
    monkeypatch.setattr(client, "is_initialized", lambda: False)
    with pytest.raises(DidNotEnable):
        LaunchDarklyIntegration(ld_client=client)
sentry-python-2.18.0/tests/integrations/litestar/000077500000000000000000000000001471214654000221505ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/litestar/__init__.py000066400000000000000000000000571471214654000242630ustar00rootroot00000000000000import pytest

pytest.importorskip("litestar")
sentry-python-2.18.0/tests/integrations/litestar/test_litestar.py000066400000000000000000000275111471214654000254160ustar00rootroot00000000000000from __future__ import annotations
import functools

import pytest

from sentry_sdk import capture_message
from sentry_sdk.integrations.litestar import LitestarIntegration

from typing import Any

from litestar import Litestar, get, Controller
from litestar.logging.config import LoggingConfig
from litestar.middleware import AbstractMiddleware
from litestar.middleware.logging import LoggingMiddlewareConfig
from litestar.middleware.rate_limit import RateLimitConfig
from litestar.middleware.session.server_side import ServerSideSessionConfig
from litestar.testing import TestClient


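# Builds a small Litestar app whose handlers deliberately raise (or capture a
# message) so the tests below can assert on the events Sentry produces.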
def litestar_app_factory(middleware=None, debug=True, exception_handlers=None):
    class MyController(Controller):
        path = "/controller"

        @get("/error")
        async def controller_error(self) -> None:
            raise Exception("Whoa")

    @get("/some_url")
    async def homepage_handler() -> "dict[str, Any]":
        1 / 0
        return {"status": "ok"}

    @get("/custom_error", name="custom_name")
    async def custom_error() -> Any:
        raise Exception("Too Hot")

    @get("/message")
    async def message() -> "dict[str, Any]":
        capture_message("hi")
        return {"status": "ok"}

    @get("/message/{message_id:str}")
    async def message_with_id() -> "dict[str, Any]":
        capture_message("hi")
        return {"status": "ok"}

    logging_config = LoggingConfig()

    app = Litestar(
        route_handlers=[
            homepage_handler,
            custom_error,
            message,
            message_with_id,
            MyController,
        ],
        debug=debug,
        middleware=middleware,
        logging_config=logging_config,
        exception_handlers=exception_handlers,
    )

    return app


@pytest.mark.parametrize(
    "test_url,expected_error,expected_message,expected_tx_name",
    [
        (
            "/some_url",
            ZeroDivisionError,
            "division by zero",
            "tests.integrations.litestar.test_litestar.litestar_app_factory..homepage_handler",
        ),
        (
            "/custom_error",
            Exception,
            "Too Hot",
            "custom_name",
        ),
        (
            "/controller/error",
            Exception,
            "Whoa",
            "tests.integrations.litestar.test_litestar.litestar_app_factory..MyController.controller_error",
        ),
    ],
)
def test_catch_exceptions(
    sentry_init,
    capture_exceptions,
    capture_events,
    test_url,
    expected_error,
    expected_message,
    expected_tx_name,
):
    sentry_init(integrations=[LitestarIntegration()])
    litestar_app = litestar_app_factory()
    exceptions = capture_exceptions()
    events = capture_events()

    client = TestClient(litestar_app)
    try:
        client.get(test_url)
    except Exception:
        pass

    (exc,) = exceptions
    assert isinstance(exc, expected_error)
    assert str(exc) == expected_message

    (event,) = events
    assert expected_tx_name in event["transaction"]
    assert event["exception"]["values"][0]["mechanism"]["type"] == "litestar"


def test_middleware_spans(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[LitestarIntegration()],
    )

    logging_config = LoggingMiddlewareConfig()
    session_config = ServerSideSessionConfig()
    rate_limit_config = RateLimitConfig(rate_limit=("hour", 5))

    litestar_app = litestar_app_factory(
        middleware=[
            session_config.middleware,
            logging_config.middleware,
            rate_limit_config.middleware,
        ]
    )
    events = capture_events()

    client = TestClient(
        litestar_app, raise_server_exceptions=False, base_url="http://testserver.local"
    )
    client.get("/message")

    (_, transaction_event) = events

    expected = {"SessionMiddleware", "LoggingMiddleware", "RateLimitMiddleware"}
    found = set()
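    # Each middleware should contribute exactly one span; tracking seen
    # descriptions lets the loop below catch duplicates.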

    litestar_spans = (
        span
        for span in transaction_event["spans"]
        if span["op"] == "middleware.litestar"
    )

    for span in litestar_spans:
        assert span["description"] in expected
        assert span["description"] not in found
        found.add(span["description"])
        assert span["description"] == span["tags"]["litestar.middleware_name"]


def test_middleware_callback_spans(sentry_init, capture_events):
    class SampleMiddleware(AbstractMiddleware):
        async def __call__(self, scope, receive, send) -> None:
            async def do_stuff(message):
                if message["type"] == "http.response.start":
                    # do something here.
                    pass
                await send(message)

            await self.app(scope, receive, do_stuff)

    sentry_init(
        traces_sample_rate=1.0,
        integrations=[LitestarIntegration()],
    )
    litestar_app = litestar_app_factory(middleware=[SampleMiddleware])
    events = capture_events()

    client = TestClient(litestar_app, raise_server_exceptions=False)
    client.get("/message")

    (_, transaction_events) = events

    expected_litestar_spans = [
        {
            "op": "middleware.litestar",
            "description": "SampleMiddleware",
            "tags": {"litestar.middleware_name": "SampleMiddleware"},
        },
        {
            "op": "middleware.litestar.send",
            "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send",
            "tags": {"litestar.middleware_name": "SampleMiddleware"},
        },
        {
            "op": "middleware.litestar.send",
            "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send",
            "tags": {"litestar.middleware_name": "SampleMiddleware"},
        },
    ]

    def is_matching_span(expected_span, actual_span):
        return (
            expected_span["op"] == actual_span["op"]
            and expected_span["description"] == actual_span["description"]
            and expected_span["tags"] == actual_span["tags"]
        )

    actual_litestar_spans = list(
        span
        for span in transaction_events["spans"]
        if "middleware.litestar" in span["op"]
    )
    assert len(actual_litestar_spans) == 3

    for expected_span in expected_litestar_spans:
        assert any(
            is_matching_span(expected_span, actual_span)
            for actual_span in actual_litestar_spans
        )


def test_middleware_receive_send(sentry_init, capture_events):
    class SampleReceiveSendMiddleware(AbstractMiddleware):
        async def __call__(self, scope, receive, send):
            message = await receive()
            assert message
            assert message["type"] == "http.request"

            send_output = await send({"type": "something-unimportant"})
            assert send_output is None

            await self.app(scope, receive, send)

    sentry_init(
        traces_sample_rate=1.0,
        integrations=[LitestarIntegration()],
    )
    litestar_app = litestar_app_factory(middleware=[SampleReceiveSendMiddleware])

    client = TestClient(litestar_app, raise_server_exceptions=False)
    # See SampleReceiveSendMiddleware.__call__ above for assertions of correct behavior
    client.get("/message")


def test_middleware_partial_receive_send(sentry_init, capture_events):
    class SamplePartialReceiveSendMiddleware(AbstractMiddleware):
        async def __call__(self, scope, receive, send):
            message = await receive()
            assert message
            assert message["type"] == "http.request"

            send_output = await send({"type": "something-unimportant"})
            assert send_output is None

            async def my_receive(*args, **kwargs):
                pass

            async def my_send(*args, **kwargs):
                pass

            partial_receive = functools.partial(my_receive)
            partial_send = functools.partial(my_send)

            await self.app(scope, partial_receive, partial_send)

    sentry_init(
        traces_sample_rate=1.0,
        integrations=[LitestarIntegration()],
    )
    litestar_app = litestar_app_factory(middleware=[SamplePartialReceiveSendMiddleware])
    events = capture_events()

    client = TestClient(litestar_app, raise_server_exceptions=False)
    # See SamplePartialReceiveSendMiddleware.__call__ above for assertions of correct behavior
    client.get("/message")

    (_, transaction_events) = events

    expected_litestar_spans = [
        {
            "op": "middleware.litestar",
            "description": "SamplePartialReceiveSendMiddleware",
            "tags": {"litestar.middleware_name": "SamplePartialReceiveSendMiddleware"},
        },
        {
            "op": "middleware.litestar.receive",
            "description": "TestClientTransport.create_receive..receive",
            "tags": {"litestar.middleware_name": "SamplePartialReceiveSendMiddleware"},
        },
        {
            "op": "middleware.litestar.send",
            "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send",
            "tags": {"litestar.middleware_name": "SamplePartialReceiveSendMiddleware"},
        },
    ]

    def is_matching_span(expected_span, actual_span):
        return (
            expected_span["op"] == actual_span["op"]
            and actual_span["description"].startswith(expected_span["description"])
            and expected_span["tags"] == actual_span["tags"]
        )

    actual_litestar_spans = list(
        span
        for span in transaction_events["spans"]
        if "middleware.litestar" in span["op"]
    )
    assert len(actual_litestar_spans) == 3

    for expected_span in expected_litestar_spans:
        assert any(
            is_matching_span(expected_span, actual_span)
            for actual_span in actual_litestar_spans
        )


def test_span_origin(sentry_init, capture_events):
    sentry_init(
        integrations=[LitestarIntegration()],
        traces_sample_rate=1.0,
    )

    logging_config = LoggingMiddlewareConfig()
    session_config = ServerSideSessionConfig()
    rate_limit_config = RateLimitConfig(rate_limit=("hour", 5))

    litestar_app = litestar_app_factory(
        middleware=[
            session_config.middleware,
            logging_config.middleware,
            rate_limit_config.middleware,
        ]
    )
    events = capture_events()

    client = TestClient(
        litestar_app, raise_server_exceptions=False, base_url="http://testserver.local"
    )
    client.get("/message")

    (_, event) = events

    assert event["contexts"]["trace"]["origin"] == "auto.http.litestar"
    for span in event["spans"]:
        assert span["origin"] == "auto.http.litestar"


@pytest.mark.parametrize(
    "is_send_default_pii",
    [
        True,
        False,
    ],
    ids=[
        "send_default_pii=True",
        "send_default_pii=False",
    ],
)
def test_litestar_scope_user_on_exception_event(
    sentry_init, capture_exceptions, capture_events, is_send_default_pii
):
    class TestUserMiddleware(AbstractMiddleware):
        async def __call__(self, scope, receive, send):
            scope["user"] = {
                "email": "lennon@thebeatles.com",
                "username": "john",
                "id": "1",
            }
            await self.app(scope, receive, send)

    sentry_init(
        integrations=[LitestarIntegration()], send_default_pii=is_send_default_pii
    )
    litestar_app = litestar_app_factory(middleware=[TestUserMiddleware])
    exceptions = capture_exceptions()
    events = capture_events()

    # This request intentionally raises an exception
    client = TestClient(litestar_app)
    try:
        client.get("/some_url")
    except Exception:
        pass

    assert len(exceptions) == 1
    assert len(events) == 1
    (event,) = events

    if is_send_default_pii:
        assert "user" in event
        assert event["user"] == {
            "email": "lennon@thebeatles.com",
            "username": "john",
            "id": "1",
        }
    else:
        assert "user" not in event
sentry-python-2.18.0/tests/integrations/logging/000077500000000000000000000000001471214654000217475ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/logging/test_logging.py000066400000000000000000000151651471214654000250160ustar00rootroot00000000000000import logging
import warnings

import pytest

from sentry_sdk.integrations.logging import LoggingIntegration, ignore_logger

other_logger = logging.getLogger("testfoo")
logger = logging.getLogger(__name__)


@pytest.fixture(autouse=True)
def reset_level():
    other_logger.setLevel(logging.DEBUG)
    logger.setLevel(logging.DEBUG)


@pytest.mark.parametrize("logger", [logger, other_logger])
def test_logging_works_with_many_loggers(sentry_init, capture_events, logger):
    sentry_init(integrations=[LoggingIntegration(event_level="ERROR")])
    events = capture_events()

    logger.info("bread")
    logger.critical("LOL")
    (event,) = events
    assert event["level"] == "fatal"
    assert not event["logentry"]["params"]
    assert event["logentry"]["message"] == "LOL"
    assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"]["values"])


@pytest.mark.parametrize("integrations", [None, [], [LoggingIntegration()]])
@pytest.mark.parametrize(
    "kwargs", [{"exc_info": None}, {}, {"exc_info": 0}, {"exc_info": False}]
)
def test_logging_defaults(integrations, sentry_init, capture_events, kwargs):
    sentry_init(integrations=integrations)
    events = capture_events()

    logger.info("bread")
    logger.critical("LOL", **kwargs)
    (event,) = events

    assert event["level"] == "fatal"
    assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"]["values"])
    assert not any(
        crumb["message"] == "LOL" for crumb in event["breadcrumbs"]["values"]
    )
    assert "threads" not in event


def test_logging_extra_data(sentry_init, capture_events):
    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
    events = capture_events()

    logger.info("bread", extra=dict(foo=42))
    logger.critical("lol", extra=dict(bar=69))

    (event,) = events

    assert event["level"] == "fatal"
    assert event["extra"] == {"bar": 69}
    assert any(
        crumb["message"] == "bread" and crumb["data"] == {"foo": 42}
        for crumb in event["breadcrumbs"]["values"]
    )


def test_logging_extra_data_integer_keys(sentry_init, capture_events):
    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
    events = capture_events()

    logger.critical("integer in extra keys", extra={1: 1})

    (event,) = events

    assert event["extra"] == {"1": 1}


def test_logging_stack(sentry_init, capture_events):
    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
    events = capture_events()

    logger.error("first", exc_info=True)
    logger.error("second")

    (
        event_with,
        event_without,
    ) = events

    assert event_with["level"] == "error"
    assert event_with["threads"]["values"][0]["stacktrace"]["frames"]

    assert event_without["level"] == "error"
    assert "threads" not in event_without


def test_logging_level(sentry_init, capture_events):
    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
    events = capture_events()

    logger.setLevel(logging.WARNING)
    logger.error("hi")
    (event,) = events
    assert event["level"] == "error"
    assert event["logentry"]["message"] == "hi"

    del events[:]

    logger.setLevel(logging.ERROR)
    logger.warning("hi")
    assert not events


def test_custom_log_level_names(sentry_init, capture_events):
    levels = {
        logging.DEBUG: "debug",
        logging.INFO: "info",
        logging.WARN: "warning",
        logging.WARNING: "warning",
        logging.ERROR: "error",
        logging.CRITICAL: "fatal",
        logging.FATAL: "fatal",
    }

    # set custom log level names
    logging.addLevelName(logging.DEBUG, "custom level debüg: ")
    logging.addLevelName(logging.INFO, "")
    logging.addLevelName(logging.WARN, "custom level warn: ")
    logging.addLevelName(logging.WARNING, "custom level warning: ")
    logging.addLevelName(logging.ERROR, None)
    logging.addLevelName(logging.CRITICAL, "custom level critical: ")
    logging.addLevelName(logging.FATAL, "custom level 🔥: ")

    for logging_level, sentry_level in levels.items():
        logger.setLevel(logging_level)
        sentry_init(
            integrations=[LoggingIntegration(event_level=logging_level)],
            default_integrations=False,
        )
        events = capture_events()

        logger.log(logging_level, "Trying level %s", logging_level)
        assert events
        assert events[0]["level"] == sentry_level
        assert events[0]["logentry"]["message"] == "Trying level %s"
        assert events[0]["logentry"]["params"] == [logging_level]

        del events[:]


def test_logging_filters(sentry_init, capture_events):
    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
    events = capture_events()

    should_log = False

    class MyFilter(logging.Filter):
        def filter(self, record):
            return should_log

    logger.addFilter(MyFilter())
    logger.error("hi")

    assert not events

    should_log = True
    logger.error("hi")

    (event,) = events
    assert event["logentry"]["message"] == "hi"


def test_logging_captured_warnings(sentry_init, capture_events, recwarn):
    sentry_init(
        integrations=[LoggingIntegration(event_level="WARNING")],
        default_integrations=False,
    )
    events = capture_events()

    logging.captureWarnings(True)
    warnings.warn("first", stacklevel=2)
    warnings.warn("second", stacklevel=2)
    logging.captureWarnings(False)

    warnings.warn("third", stacklevel=2)

    assert len(events) == 2

    assert events[0]["level"] == "warning"
    # Captured warnings start with the path where the warning was raised
    assert "UserWarning: first" in events[0]["logentry"]["message"]
    assert events[0]["logentry"]["params"] == []

    assert events[1]["level"] == "warning"
    assert "UserWarning: second" in events[1]["logentry"]["message"]
    assert events[1]["logentry"]["params"] == []

    # Using recwarn suppresses the "third" warning in the test output
    assert len(recwarn) == 1
    assert str(recwarn[0].message) == "third"


def test_ignore_logger(sentry_init, capture_events):
    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
    events = capture_events()

    ignore_logger("testfoo")

    other_logger.error("hi")

    assert not events


def test_ignore_logger_wildcard(sentry_init, capture_events):
    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
    events = capture_events()

    ignore_logger("testfoo.*")

    nested_logger = logging.getLogger("testfoo.submodule")

    logger.error("hi")

    nested_logger.error("bye")

    (event,) = events
    assert event["logentry"]["message"] == "hi"
sentry-python-2.18.0/tests/integrations/loguru/000077500000000000000000000000001471214654000216365ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/loguru/__init__.py000066400000000000000000000000551471214654000237470ustar00rootroot00000000000000import pytest

pytest.importorskip("loguru")
sentry-python-2.18.0/tests/integrations/loguru/test_loguru.py000066400000000000000000000063121471214654000245660ustar00rootroot00000000000000import pytest
from loguru import logger

import sentry_sdk
from sentry_sdk.integrations.loguru import LoguruIntegration, LoggingLevels

logger.remove(0)  # don't print to console


@pytest.mark.parametrize(
    "level,created_event",
    [
        # None - no breadcrumb
        # False - no event
        # True - event created
        (LoggingLevels.TRACE, None),
        (LoggingLevels.DEBUG, None),
        (LoggingLevels.INFO, False),
        (LoggingLevels.SUCCESS, False),
        (LoggingLevels.WARNING, False),
        (LoggingLevels.ERROR, True),
        (LoggingLevels.CRITICAL, True),
    ],
)
@pytest.mark.parametrize("disable_breadcrumbs", [True, False])
@pytest.mark.parametrize("disable_events", [True, False])
def test_just_log(
    sentry_init,
    capture_events,
    level,
    created_event,
    disable_breadcrumbs,
    disable_events,
):
    sentry_init(
        integrations=[
            LoguruIntegration(
                level=None if disable_breadcrumbs else LoggingLevels.INFO.value,
                event_level=None if disable_events else LoggingLevels.ERROR.value,
            )
        ],
        default_integrations=False,
    )
    events = capture_events()

    getattr(logger, level.name.lower())("test")

    formatted_message = (
        " | "
        + "{:9}".format(level.name.upper())
        + "| tests.integrations.loguru.test_loguru:test_just_log:46 - test"
    )
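    # The [23:] slices below drop the leading timestamp, which varies between
    # runs, before comparing against the rest of the formatted message.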

    if not created_event:
        assert not events

        breadcrumbs = sentry_sdk.get_isolation_scope()._breadcrumbs
        if (
            not disable_breadcrumbs and created_event is not None
        ):  # created_event is None only for TRACE/DEBUG, which emit no breadcrumb
            (breadcrumb,) = breadcrumbs
            assert breadcrumb["level"] == level.name.lower()
            assert breadcrumb["category"] == "tests.integrations.loguru.test_loguru"
            assert breadcrumb["message"][23:] == formatted_message
        else:
            assert not breadcrumbs

        return

    if disable_events:
        assert not events
        return

    (event,) = events
    assert event["level"] == (level.name.lower())
    assert event["logger"] == "tests.integrations.loguru.test_loguru"
    assert event["logentry"]["message"][23:] == formatted_message


def test_breadcrumb_format(sentry_init, capture_events):
    sentry_init(
        integrations=[
            LoguruIntegration(
                level=LoggingLevels.INFO.value,
                event_level=None,
                breadcrumb_format="{message}",
            )
        ],
        default_integrations=False,
    )

    logger.info("test")
    formatted_message = "test"

    breadcrumbs = sentry_sdk.get_isolation_scope()._breadcrumbs
    (breadcrumb,) = breadcrumbs
    assert breadcrumb["message"] == formatted_message


def test_event_format(sentry_init, capture_events):
    sentry_init(
        integrations=[
            LoguruIntegration(
                level=None,
                event_level=LoggingLevels.ERROR.value,
                event_format="{message}",
            )
        ],
        default_integrations=False,
    )
    events = capture_events()

    logger.error("test")
    formatted_message = "test"

    (event,) = events
    assert event["logentry"]["message"] == formatted_message
sentry-python-2.18.0/tests/integrations/modules/000077500000000000000000000000001471214654000217715ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/modules/test_modules.py000066400000000000000000000005571471214654000250610ustar00rootroot00000000000000import sentry_sdk

from sentry_sdk.integrations.modules import ModulesIntegration


def test_basic(sentry_init, capture_events):
    sentry_init(integrations=[ModulesIntegration()])
    events = capture_events()

    sentry_sdk.capture_exception(ValueError())

    (event,) = events
    assert "sentry-sdk" in event["modules"]
    assert "pytest" in event["modules"]
sentry-python-2.18.0/tests/integrations/openai/000077500000000000000000000000001471214654000215745ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/openai/__init__.py000066400000000000000000000000551471214654000237050ustar00rootroot00000000000000import pytest

pytest.importorskip("openai")
sentry-python-2.18.0/tests/integrations/openai/test_openai.py000066400000000000000000000656061471214654000244750ustar00rootroot00000000000000import pytest
from openai import AsyncOpenAI, OpenAI, AsyncStream, Stream, OpenAIError
from openai.types import CompletionUsage, CreateEmbeddingResponse, Embedding
from openai.types.chat import ChatCompletion, ChatCompletionMessage, ChatCompletionChunk
from openai.types.chat.chat_completion import Choice
from openai.types.chat.chat_completion_chunk import ChoiceDelta, Choice as DeltaChoice
from openai.types.create_embedding_response import Usage as EmbeddingTokenUsage

from sentry_sdk import start_transaction
from sentry_sdk.integrations.openai import (
    OpenAIIntegration,
    _calculate_chat_completion_usage,
)

from unittest import mock  # python 3.3 and above

try:
    from unittest.mock import AsyncMock
except ImportError:

    class AsyncMock(mock.MagicMock):
        async def __call__(self, *args, **kwargs):
            return super(AsyncMock, self).__call__(*args, **kwargs)


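# A canned non-streaming chat completion shared by several tests below; its
# fixed CompletionUsage values make the token-usage assertions exact.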
EXAMPLE_CHAT_COMPLETION = ChatCompletion(
    id="chat-id",
    choices=[
        Choice(
            index=0,
            finish_reason="stop",
            message=ChatCompletionMessage(
                role="assistant", content="the model response"
            ),
        )
    ],
    created=10000000,
    model="model-id",
    object="chat.completion",
    usage=CompletionUsage(
        completion_tokens=10,
        prompt_tokens=20,
        total_tokens=30,
    ),
)


async def async_iterator(values):
    for value in values:
        yield value


@pytest.mark.parametrize(
    "send_default_pii, include_prompts",
    [(True, True), (True, False), (False, True), (False, False)],
)
def test_nonstreaming_chat_completion(
    sentry_init, capture_events, send_default_pii, include_prompts
):
    sentry_init(
        integrations=[OpenAIIntegration(include_prompts=include_prompts)],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    client = OpenAI(api_key="z")
    client.chat.completions._post = mock.Mock(return_value=EXAMPLE_CHAT_COMPLETION)

    with start_transaction(name="openai tx"):
        response = (
            client.chat.completions.create(
                model="some-model", messages=[{"role": "system", "content": "hello"}]
            )
            .choices[0]
            .message.content
        )

    assert response == "the model response"
    tx = events[0]
    assert tx["type"] == "transaction"
    span = tx["spans"][0]
    assert span["op"] == "ai.chat_completions.create.openai"

    if send_default_pii and include_prompts:
        assert "hello" in span["data"]["ai.input_messages"]["content"]
        assert "the model response" in span["data"]["ai.responses"]["content"]
    else:
        assert "ai.input_messages" not in span["data"]
        assert "ai.responses" not in span["data"]

    assert span["measurements"]["ai_completion_tokens_used"]["value"] == 10
    assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 20
    assert span["measurements"]["ai_total_tokens_used"]["value"] == 30


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "send_default_pii, include_prompts",
    [(True, True), (True, False), (False, True), (False, False)],
)
async def test_nonstreaming_chat_completion_async(
    sentry_init, capture_events, send_default_pii, include_prompts
):
    sentry_init(
        integrations=[OpenAIIntegration(include_prompts=include_prompts)],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    client = AsyncOpenAI(api_key="z")
    client.chat.completions._post = AsyncMock(return_value=EXAMPLE_CHAT_COMPLETION)

    with start_transaction(name="openai tx"):
        response = await client.chat.completions.create(
            model="some-model", messages=[{"role": "system", "content": "hello"}]
        )
        response = response.choices[0].message.content

    assert response == "the model response"
    tx = events[0]
    assert tx["type"] == "transaction"
    span = tx["spans"][0]
    assert span["op"] == "ai.chat_completions.create.openai"

    if send_default_pii and include_prompts:
        assert "hello" in span["data"]["ai.input_messages"]["content"]
        assert "the model response" in span["data"]["ai.responses"]["content"]
    else:
        assert "ai.input_messages" not in span["data"]
        assert "ai.responses" not in span["data"]

    assert span["measurements"]["ai_completion_tokens_used"]["value"] == 10
    assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 20
    assert span["measurements"]["ai_total_tokens_used"]["value"] == 30


def tiktoken_encoding_if_installed():
    try:
        import tiktoken  # type: ignore # noqa # pylint: disable=unused-import

        return "cl100k_base"
    except ImportError:
        return None


# noinspection PyTypeChecker
@pytest.mark.parametrize(
    "send_default_pii, include_prompts",
    [(True, True), (True, False), (False, True), (False, False)],
)
def test_streaming_chat_completion(
    sentry_init, capture_events, send_default_pii, include_prompts
):
    sentry_init(
        integrations=[
            OpenAIIntegration(
                include_prompts=include_prompts,
                tiktoken_encoding_name=tiktoken_encoding_if_installed(),
            )
        ],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    client = OpenAI(api_key="z")
    returned_stream = Stream(cast_to=None, response=None, client=client)
    returned_stream._iterator = [
        ChatCompletionChunk(
            id="1",
            choices=[
                DeltaChoice(
                    index=0, delta=ChoiceDelta(content="hel"), finish_reason=None
                )
            ],
            created=100000,
            model="model-id",
            object="chat.completion.chunk",
        ),
        ChatCompletionChunk(
            id="1",
            choices=[
                DeltaChoice(
                    index=1, delta=ChoiceDelta(content="lo "), finish_reason=None
                )
            ],
            created=100000,
            model="model-id",
            object="chat.completion.chunk",
        ),
        ChatCompletionChunk(
            id="1",
            choices=[
                DeltaChoice(
                    index=2, delta=ChoiceDelta(content="world"), finish_reason="stop"
                )
            ],
            created=100000,
            model="model-id",
            object="chat.completion.chunk",
        ),
    ]

    client.chat.completions._post = mock.Mock(return_value=returned_stream)
    with start_transaction(name="openai tx"):
        response_stream = client.chat.completions.create(
            model="some-model", messages=[{"role": "system", "content": "hello"}]
        )
        response_string = "".join(
            map(lambda x: x.choices[0].delta.content, response_stream)
        )
    assert response_string == "hello world"
    tx = events[0]
    assert tx["type"] == "transaction"
    span = tx["spans"][0]
    assert span["op"] == "ai.chat_completions.create.openai"

    if send_default_pii and include_prompts:
        assert "hello" in span["data"]["ai.input_messages"]["content"]
        assert "hello world" in span["data"]["ai.responses"]
    else:
        assert "ai.input_messages" not in span["data"]
        assert "ai.responses" not in span["data"]

    try:
        import tiktoken  # type: ignore # noqa # pylint: disable=unused-import

        assert span["measurements"]["ai_completion_tokens_used"]["value"] == 2
        assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 1
        assert span["measurements"]["ai_total_tokens_used"]["value"] == 3
    except ImportError:
        pass  # if tiktoken is not installed, we can't guarantee token usage will be calculated properly


# noinspection PyTypeChecker
@pytest.mark.asyncio
@pytest.mark.parametrize(
    "send_default_pii, include_prompts",
    [(True, True), (True, False), (False, True), (False, False)],
)
async def test_streaming_chat_completion_async(
    sentry_init, capture_events, send_default_pii, include_prompts
):
    sentry_init(
        integrations=[
            OpenAIIntegration(
                include_prompts=include_prompts,
                tiktoken_encoding_name=tiktoken_encoding_if_installed(),
            )
        ],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    client = AsyncOpenAI(api_key="z")
    returned_stream = AsyncStream(cast_to=None, response=None, client=client)
    returned_stream._iterator = async_iterator(
        [
            ChatCompletionChunk(
                id="1",
                choices=[
                    DeltaChoice(
                        index=0, delta=ChoiceDelta(content="hel"), finish_reason=None
                    )
                ],
                created=100000,
                model="model-id",
                object="chat.completion.chunk",
            ),
            ChatCompletionChunk(
                id="1",
                choices=[
                    DeltaChoice(
                        index=1, delta=ChoiceDelta(content="lo "), finish_reason=None
                    )
                ],
                created=100000,
                model="model-id",
                object="chat.completion.chunk",
            ),
            ChatCompletionChunk(
                id="1",
                choices=[
                    DeltaChoice(
                        index=2,
                        delta=ChoiceDelta(content="world"),
                        finish_reason="stop",
                    )
                ],
                created=100000,
                model="model-id",
                object="chat.completion.chunk",
            ),
        ]
    )

    client.chat.completions._post = AsyncMock(return_value=returned_stream)
    with start_transaction(name="openai tx"):
        response_stream = await client.chat.completions.create(
            model="some-model", messages=[{"role": "system", "content": "hello"}]
        )

        response_string = ""
        async for x in response_stream:
            response_string += x.choices[0].delta.content

    assert response_string == "hello world"
    tx = events[0]
    assert tx["type"] == "transaction"
    span = tx["spans"][0]
    assert span["op"] == "ai.chat_completions.create.openai"

    if send_default_pii and include_prompts:
        assert "hello" in span["data"]["ai.input_messages"]["content"]
        assert "hello world" in span["data"]["ai.responses"]
    else:
        assert "ai.input_messages" not in span["data"]
        assert "ai.responses" not in span["data"]

    try:
        import tiktoken  # type: ignore # noqa # pylint: disable=unused-import

        assert span["measurements"]["ai_completion_tokens_used"]["value"] == 2
        assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 1
        assert span["measurements"]["ai_total_tokens_used"]["value"] == 3
    except ImportError:
        pass  # if tiktoken is not installed, we can't guarantee token usage will be calculated properly


def test_bad_chat_completion(sentry_init, capture_events):
    sentry_init(integrations=[OpenAIIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    client = OpenAI(api_key="z")
    client.chat.completions._post = mock.Mock(
        side_effect=OpenAIError("API rate limit reached")
    )
    with pytest.raises(OpenAIError):
        client.chat.completions.create(
            model="some-model", messages=[{"role": "system", "content": "hello"}]
        )

    (event,) = events
    assert event["level"] == "error"


@pytest.mark.asyncio
async def test_bad_chat_completion_async(sentry_init, capture_events):
    sentry_init(integrations=[OpenAIIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    client = AsyncOpenAI(api_key="z")
    client.chat.completions._post = AsyncMock(
        side_effect=OpenAIError("API rate limit reached")
    )
    with pytest.raises(OpenAIError):
        await client.chat.completions.create(
            model="some-model", messages=[{"role": "system", "content": "hello"}]
        )

    (event,) = events
    assert event["level"] == "error"


@pytest.mark.parametrize(
    "send_default_pii, include_prompts",
    [(True, True), (True, False), (False, True), (False, False)],
)
def test_embeddings_create(
    sentry_init, capture_events, send_default_pii, include_prompts
):
    sentry_init(
        integrations=[OpenAIIntegration(include_prompts=include_prompts)],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    client = OpenAI(api_key="z")

    returned_embedding = CreateEmbeddingResponse(
        data=[Embedding(object="embedding", index=0, embedding=[1.0, 2.0, 3.0])],
        model="some-model",
        object="list",
        usage=EmbeddingTokenUsage(
            prompt_tokens=20,
            total_tokens=30,
        ),
    )

    client.embeddings._post = mock.Mock(return_value=returned_embedding)
    with start_transaction(name="openai tx"):
        response = client.embeddings.create(
            input="hello", model="text-embedding-3-large"
        )

    assert len(response.data[0].embedding) == 3

    tx = events[0]
    assert tx["type"] == "transaction"
    span = tx["spans"][0]
    assert span["op"] == "ai.embeddings.create.openai"
    if send_default_pii and include_prompts:
        assert "hello" in span["data"]["ai.input_messages"]
    else:
        assert "ai.input_messages" not in span["data"]

    assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 20
    assert span["measurements"]["ai_total_tokens_used"]["value"] == 30


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "send_default_pii, include_prompts",
    [(True, True), (True, False), (False, True), (False, False)],
)
async def test_embeddings_create_async(
    sentry_init, capture_events, send_default_pii, include_prompts
):
    sentry_init(
        integrations=[OpenAIIntegration(include_prompts=include_prompts)],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    client = AsyncOpenAI(api_key="z")

    returned_embedding = CreateEmbeddingResponse(
        data=[Embedding(object="embedding", index=0, embedding=[1.0, 2.0, 3.0])],
        model="some-model",
        object="list",
        usage=EmbeddingTokenUsage(
            prompt_tokens=20,
            total_tokens=30,
        ),
    )

    client.embeddings._post = AsyncMock(return_value=returned_embedding)
    with start_transaction(name="openai tx"):
        response = await client.embeddings.create(
            input="hello", model="text-embedding-3-large"
        )

    assert len(response.data[0].embedding) == 3

    tx = events[0]
    assert tx["type"] == "transaction"
    span = tx["spans"][0]
    assert span["op"] == "ai.embeddings.create.openai"
    if send_default_pii and include_prompts:
        assert "hello" in span["data"]["ai.input_messages"]
    else:
        assert "ai.input_messages" not in span["data"]

    assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 20
    assert span["measurements"]["ai_total_tokens_used"]["value"] == 30


@pytest.mark.parametrize(
    "send_default_pii, include_prompts",
    [(True, True), (True, False), (False, True), (False, False)],
)
def test_embeddings_create_raises_error(
    sentry_init, capture_events, send_default_pii, include_prompts
):
    sentry_init(
        integrations=[OpenAIIntegration(include_prompts=include_prompts)],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    client = OpenAI(api_key="z")

    client.embeddings._post = mock.Mock(
        side_effect=OpenAIError("API rate limit reached")
    )

    with pytest.raises(OpenAIError):
        client.embeddings.create(input="hello", model="text-embedding-3-large")

    (event,) = events
    assert event["level"] == "error"


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "send_default_pii, include_prompts",
    [(True, True), (True, False), (False, True), (False, False)],
)
async def test_embeddings_create_raises_error_async(
    sentry_init, capture_events, send_default_pii, include_prompts
):
    sentry_init(
        integrations=[OpenAIIntegration(include_prompts=include_prompts)],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    client = AsyncOpenAI(api_key="z")

    client.embeddings._post = AsyncMock(
        side_effect=OpenAIError("API rate limit reached")
    )

    with pytest.raises(OpenAIError):
        await client.embeddings.create(input="hello", model="text-embedding-3-large")

    (event,) = events
    assert event["level"] == "error"


def test_span_origin_nonstreaming_chat(sentry_init, capture_events):
    sentry_init(
        integrations=[OpenAIIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client = OpenAI(api_key="z")
    client.chat.completions._post = mock.Mock(return_value=EXAMPLE_CHAT_COMPLETION)

    with start_transaction(name="openai tx"):
        client.chat.completions.create(
            model="some-model", messages=[{"role": "system", "content": "hello"}]
        )

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"
    assert event["spans"][0]["origin"] == "auto.ai.openai"


@pytest.mark.asyncio
async def test_span_origin_nonstreaming_chat_async(sentry_init, capture_events):
    sentry_init(
        integrations=[OpenAIIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client = AsyncOpenAI(api_key="z")
    client.chat.completions._post = AsyncMock(return_value=EXAMPLE_CHAT_COMPLETION)

    with start_transaction(name="openai tx"):
        await client.chat.completions.create(
            model="some-model", messages=[{"role": "system", "content": "hello"}]
        )

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"
    assert event["spans"][0]["origin"] == "auto.ai.openai"


def test_span_origin_streaming_chat(sentry_init, capture_events):
    sentry_init(
        integrations=[OpenAIIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client = OpenAI(api_key="z")
    returned_stream = Stream(cast_to=None, response=None, client=client)
    returned_stream._iterator = [
        ChatCompletionChunk(
            id="1",
            choices=[
                DeltaChoice(
                    index=0, delta=ChoiceDelta(content="hel"), finish_reason=None
                )
            ],
            created=100000,
            model="model-id",
            object="chat.completion.chunk",
        ),
        ChatCompletionChunk(
            id="1",
            choices=[
                DeltaChoice(
                    index=1, delta=ChoiceDelta(content="lo "), finish_reason=None
                )
            ],
            created=100000,
            model="model-id",
            object="chat.completion.chunk",
        ),
        ChatCompletionChunk(
            id="1",
            choices=[
                DeltaChoice(
                    index=2, delta=ChoiceDelta(content="world"), finish_reason="stop"
                )
            ],
            created=100000,
            model="model-id",
            object="chat.completion.chunk",
        ),
    ]

    client.chat.completions._post = mock.Mock(return_value=returned_stream)
    with start_transaction(name="openai tx"):
        response_stream = client.chat.completions.create(
            model="some-model", messages=[{"role": "system", "content": "hello"}]
        )

        "".join(map(lambda x: x.choices[0].delta.content, response_stream))

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"
    assert event["spans"][0]["origin"] == "auto.ai.openai"


@pytest.mark.asyncio
async def test_span_origin_streaming_chat_async(sentry_init, capture_events):
    sentry_init(
        integrations=[OpenAIIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client = AsyncOpenAI(api_key="z")
    returned_stream = AsyncStream(cast_to=None, response=None, client=client)
    returned_stream._iterator = async_iterator(
        [
            ChatCompletionChunk(
                id="1",
                choices=[
                    DeltaChoice(
                        index=0, delta=ChoiceDelta(content="hel"), finish_reason=None
                    )
                ],
                created=100000,
                model="model-id",
                object="chat.completion.chunk",
            ),
            ChatCompletionChunk(
                id="1",
                choices=[
                    DeltaChoice(
                        index=1, delta=ChoiceDelta(content="lo "), finish_reason=None
                    )
                ],
                created=100000,
                model="model-id",
                object="chat.completion.chunk",
            ),
            ChatCompletionChunk(
                id="1",
                choices=[
                    DeltaChoice(
                        index=2,
                        delta=ChoiceDelta(content="world"),
                        finish_reason="stop",
                    )
                ],
                created=100000,
                model="model-id",
                object="chat.completion.chunk",
            ),
        ]
    )

    client.chat.completions._post = AsyncMock(return_value=returned_stream)
    with start_transaction(name="openai tx"):
        response_stream = await client.chat.completions.create(
            model="some-model", messages=[{"role": "system", "content": "hello"}]
        )
        async for _ in response_stream:
            pass

        # "".join(map(lambda x: x.choices[0].delta.content, response_stream))

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"
    assert event["spans"][0]["origin"] == "auto.ai.openai"


def test_span_origin_embeddings(sentry_init, capture_events):
    sentry_init(
        integrations=[OpenAIIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client = OpenAI(api_key="z")

    returned_embedding = CreateEmbeddingResponse(
        data=[Embedding(object="embedding", index=0, embedding=[1.0, 2.0, 3.0])],
        model="some-model",
        object="list",
        usage=EmbeddingTokenUsage(
            prompt_tokens=20,
            total_tokens=30,
        ),
    )

    client.embeddings._post = mock.Mock(return_value=returned_embedding)
    with start_transaction(name="openai tx"):
        client.embeddings.create(input="hello", model="text-embedding-3-large")

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"
    assert event["spans"][0]["origin"] == "auto.ai.openai"


@pytest.mark.asyncio
async def test_span_origin_embeddings_async(sentry_init, capture_events):
    sentry_init(
        integrations=[OpenAIIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client = AsyncOpenAI(api_key="z")

    returned_embedding = CreateEmbeddingResponse(
        data=[Embedding(object="embedding", index=0, embedding=[1.0, 2.0, 3.0])],
        model="some-model",
        object="list",
        usage=EmbeddingTokenUsage(
            prompt_tokens=20,
            total_tokens=30,
        ),
    )

    client.embeddings._post = AsyncMock(return_value=returned_embedding)
    with start_transaction(name="openai tx"):
        await client.embeddings.create(input="hello", model="text-embedding-3-large")

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"
    assert event["spans"][0]["origin"] == "auto.ai.openai"


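# The five cases below pin down _calculate_chat_completion_usage's fallbacks,
# as observed through the expected record_token_usage calls:
# (a) the response reports all three token counts and they pass through as-is,
# (b) missing prompt tokens are re-counted from the input messages,
# (c) missing completion tokens are re-counted from streamed responses,
# (d) when a streaming-response list is given (even an empty one),
#     response.choices is not consulted and the completion count stays None,
# (e) with nothing usable, every value is recorded as None.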
def test_calculate_chat_completion_usage_a():
    span = mock.MagicMock()

    def count_tokens(msg):
        return len(str(msg))

    response = mock.MagicMock()
    response.usage = mock.MagicMock()
    response.usage.completion_tokens = 10
    response.usage.prompt_tokens = 20
    response.usage.total_tokens = 30
    messages = []
    streaming_message_responses = []

    with mock.patch(
        "sentry_sdk.integrations.openai.record_token_usage"
    ) as mock_record_token_usage:
        _calculate_chat_completion_usage(
            messages, response, span, streaming_message_responses, count_tokens
        )
        mock_record_token_usage.assert_called_once_with(span, 20, 10, 30)


def test_calculate_chat_completion_usage_b():
    span = mock.MagicMock()

    def count_tokens(msg):
        return len(str(msg))

    response = mock.MagicMock()
    response.usage = mock.MagicMock()
    response.usage.completion_tokens = 10
    response.usage.total_tokens = 10
    messages = [
        {"content": "one"},
        {"content": "two"},
        {"content": "three"},
    ]
    streaming_message_responses = []

    with mock.patch(
        "sentry_sdk.integrations.openai.record_token_usage"
    ) as mock_record_token_usage:
        _calculate_chat_completion_usage(
            messages, response, span, streaming_message_responses, count_tokens
        )
        mock_record_token_usage.assert_called_once_with(span, 11, 10, 10)


def test_calculate_chat_completion_usage_c():
    span = mock.MagicMock()

    def count_tokens(msg):
        return len(str(msg))

    response = mock.MagicMock()
    response.usage = mock.MagicMock()
    response.usage.prompt_tokens = 20
    response.usage.total_tokens = 20
    messages = []
    streaming_message_responses = [
        "one",
        "two",
        "three",
    ]

    with mock.patch(
        "sentry_sdk.integrations.openai.record_token_usage"
    ) as mock_record_token_usage:
        _calculate_chat_completion_usage(
            messages, response, span, streaming_message_responses, count_tokens
        )
        mock_record_token_usage.assert_called_once_with(span, 20, 11, 20)


def test_calculate_chat_completion_usage_d():
    span = mock.MagicMock()

    def count_tokens(msg):
        return len(str(msg))

    response = mock.MagicMock()
    response.usage = mock.MagicMock()
    response.usage.prompt_tokens = 20
    response.usage.total_tokens = 20
    response.choices = [
        mock.MagicMock(message="one"),
        mock.MagicMock(message="two"),
        mock.MagicMock(message="three"),
    ]
    messages = []
    streaming_message_responses = []

    with mock.patch(
        "sentry_sdk.integrations.openai.record_token_usage"
    ) as mock_record_token_usage:
        _calculate_chat_completion_usage(
            messages, response, span, streaming_message_responses, count_tokens
        )
        mock_record_token_usage.assert_called_once_with(span, 20, None, 20)


def test_calculate_chat_completion_usage_e():
    span = mock.MagicMock()

    def count_tokens(msg):
        return len(str(msg))

    response = mock.MagicMock()
    messages = []
    streaming_message_responses = None

    with mock.patch(
        "sentry_sdk.integrations.openai.record_token_usage"
    ) as mock_record_token_usage:
        _calculate_chat_completion_usage(
            messages, response, span, streaming_message_responses, count_tokens
        )
        mock_record_token_usage.assert_called_once_with(span, None, None, None)
sentry-python-2.18.0/tests/integrations/openfeature/000077500000000000000000000000001471214654000226365ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/openfeature/__init__.py000066400000000000000000000000621471214654000247450ustar00rootroot00000000000000import pytest

pytest.importorskip("openfeature")
sentry-python-2.18.0/tests/integrations/openfeature/test_openfeature.py000066400000000000000000000053121471214654000265650ustar00rootroot00000000000000import asyncio
import concurrent.futures as cf
import sentry_sdk

from openfeature import api
from openfeature.provider.in_memory_provider import InMemoryFlag, InMemoryProvider
from sentry_sdk.integrations.openfeature import OpenFeatureIntegration


def test_openfeature_integration(sentry_init):
    sentry_init(integrations=[OpenFeatureIntegration()])

    flags = {
        "hello": InMemoryFlag("on", {"on": True, "off": False}),
        "world": InMemoryFlag("off", {"on": True, "off": False}),
    }
    api.set_provider(InMemoryProvider(flags))

    client = api.get_client()
    client.get_boolean_value("hello", default_value=False)
    client.get_boolean_value("world", default_value=False)
    client.get_boolean_value("other", default_value=True)

    assert sentry_sdk.get_current_scope().flags.get() == [
        {"flag": "hello", "result": True},
        {"flag": "world", "result": False},
        {"flag": "other", "result": True},
    ]


def test_openfeature_integration_threaded(sentry_init):
    sentry_init(integrations=[OpenFeatureIntegration()])

    flags = {
        "hello": InMemoryFlag("on", {"on": True, "off": False}),
        "world": InMemoryFlag("off", {"on": True, "off": False}),
    }
    api.set_provider(InMemoryProvider(flags))

    client = api.get_client()
    client.get_boolean_value("hello", default_value=False)

    def task(flag):
        # Create a new isolation scope for the thread. This means the flags
        # evaluated in each task are captured separately.
        with sentry_sdk.isolation_scope():
            client.get_boolean_value(flag, default_value=False)
            return [f["flag"] for f in sentry_sdk.get_current_scope().flags.get()]

    with cf.ThreadPoolExecutor(max_workers=2) as pool:
        results = list(pool.map(task, ["world", "other"]))

    assert results[0] == ["hello", "world"]
    assert results[1] == ["hello", "other"]


def test_openfeature_integration_asyncio(sentry_init):
    """Assert concurrently evaluated flags do not pollute one another."""

    async def task(flag):
        with sentry_sdk.isolation_scope():
            client.get_boolean_value(flag, default_value=False)
            return [f["flag"] for f in sentry_sdk.get_current_scope().flags.get()]

    async def runner():
        return await asyncio.gather(task("world"), task("other"))

    sentry_init(integrations=[OpenFeatureIntegration()])

    flags = {
        "hello": InMemoryFlag("on", {"on": True, "off": False}),
        "world": InMemoryFlag("off", {"on": True, "off": False}),
    }
    api.set_provider(InMemoryProvider(flags))

    client = api.get_client()
    client.get_boolean_value("hello", default_value=False)

    results = asyncio.run(runner())
    assert results[0] == ["hello", "world"]
    assert results[1] == ["hello", "other"]
sentry-python-2.18.0/tests/integrations/opentelemetry/000077500000000000000000000000001471214654000232155ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/opentelemetry/__init__.py000066400000000000000000000000641471214654000253260ustar00rootroot00000000000000import pytest

pytest.importorskip("opentelemetry")
sentry-python-2.18.0/tests/integrations/opentelemetry/test_entry_points.py000066400000000000000000000010151471214654000273600ustar00rootroot00000000000000import importlib
import os
from unittest.mock import patch

from opentelemetry import propagate
from sentry_sdk.integrations.opentelemetry import SentryPropagator


def test_propagator_loaded_if_mentioned_in_environment_variable():
    try:
        with patch.dict(os.environ, {"OTEL_PROPAGATORS": "sentry"}):
            importlib.reload(propagate)

            assert len(propagate.propagators) == 1
            assert isinstance(propagate.propagators[0], SentryPropagator)
    finally:
        importlib.reload(propagate)
sentry-python-2.18.0/tests/integrations/opentelemetry/test_experimental.py000066400000000000000000000024671471214654000273340ustar00rootroot00000000000000from unittest.mock import MagicMock, patch

import pytest


@pytest.mark.forked
def test_integration_enabled_if_option_is_on(sentry_init, reset_integrations):
    mocked_setup_once = MagicMock()

    with patch(
        "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration.setup_once",
        mocked_setup_once,
    ):
        sentry_init(
            _experiments={
                "otel_powered_performance": True,
            },
        )
        mocked_setup_once.assert_called_once()


@pytest.mark.forked
def test_integration_not_enabled_if_option_is_off(sentry_init, reset_integrations):
    mocked_setup_once = MagicMock()

    with patch(
        "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration.setup_once",
        mocked_setup_once,
    ):
        sentry_init(
            _experiments={
                "otel_powered_performance": False,
            },
        )
        mocked_setup_once.assert_not_called()


@pytest.mark.forked
def test_integration_not_enabled_if_option_is_missing(sentry_init, reset_integrations):
    mocked_setup_once = MagicMock()

    with patch(
        "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration.setup_once",
        mocked_setup_once,
    ):
        sentry_init()
        mocked_setup_once.assert_not_called()
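

# Taken together, the three cases above pin down the gating: the
# OpenTelemetryIntegration is only set up when the experimental
# "otel_powered_performance" option is explicitly True; a False value or a
# missing key both leave setup_once uncalled.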
sentry-python-2.18.0/tests/integrations/opentelemetry/test_propagator.py000066400000000000000000000221201471214654000270010ustar00rootroot00000000000000import pytest

from unittest import mock
from unittest.mock import MagicMock

from opentelemetry.context import get_current
from opentelemetry.trace import (
    SpanContext,
    TraceFlags,
    set_span_in_context,
)
from opentelemetry.trace.propagation import get_current_span

from sentry_sdk.integrations.opentelemetry.consts import (
    SENTRY_BAGGAGE_KEY,
    SENTRY_TRACE_KEY,
)
from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator
from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor
from sentry_sdk.tracing_utils import Baggage


@pytest.mark.forked
def test_extract_no_context_no_sentry_trace_header():
    """
    No context and NO Sentry trace data in getter.
    Extract should return empty context.
    """
    carrier = None
    context = None
    getter = MagicMock()
    getter.get.return_value = None

    modified_context = SentryPropagator().extract(carrier, context, getter)

    assert modified_context == {}


@pytest.mark.forked
def test_extract_context_no_sentry_trace_header():
    """
    Context but NO Sentry trace data in getter.
    Extract should return context as is.
    """
    carrier = None
    context = {"some": "value"}
    getter = MagicMock()
    getter.get.return_value = None

    modified_context = SentryPropagator().extract(carrier, context, getter)

    assert modified_context == context


@pytest.mark.forked
def test_extract_empty_context_sentry_trace_header_no_baggage():
    """
    Empty context but Sentry trace data but NO Baggage in getter.
    Extract should return context that has empty baggage in it and also a NoopSpan with span_id and trace_id.
    """
    carrier = None
    context = {}
    getter = MagicMock()
    getter.get.side_effect = [
        ["1234567890abcdef1234567890abcdef-1234567890abcdef-1"],
        None,
    ]

    modified_context = SentryPropagator().extract(carrier, context, getter)

    assert len(modified_context.keys()) == 3

    assert modified_context[SENTRY_TRACE_KEY] == {
        "trace_id": "1234567890abcdef1234567890abcdef",
        "parent_span_id": "1234567890abcdef",
        "parent_sampled": True,
    }
    assert modified_context[SENTRY_BAGGAGE_KEY].serialize() == ""

    span_context = get_current_span(modified_context).get_span_context()
    assert span_context.span_id == int("1234567890abcdef", 16)
    assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16)


@pytest.mark.forked
def test_extract_context_sentry_trace_header_baggage():
    """
    Empty context but Sentry trace data and Baggage in getter.
    Extract should return context that has baggage in it and also a NoopSpan with span_id and trace_id.
    """
    baggage_header = (
        "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, "
        "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, "
        "sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;"
    )

    carrier = None
    context = {"some": "value"}
    getter = MagicMock()
    getter.get.side_effect = [
        ["1234567890abcdef1234567890abcdef-1234567890abcdef-1"],
        [baggage_header],
    ]

    modified_context = SentryPropagator().extract(carrier, context, getter)

    assert len(modified_context.keys()) == 4

    assert modified_context[SENTRY_TRACE_KEY] == {
        "trace_id": "1234567890abcdef1234567890abcdef",
        "parent_span_id": "1234567890abcdef",
        "parent_sampled": True,
    }

    assert modified_context[SENTRY_BAGGAGE_KEY].serialize() == (
        "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
        "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
        "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie"
    )

    span_context = get_current_span(modified_context).get_span_context()
    assert span_context.span_id == int("1234567890abcdef", 16)
    assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16)
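

# The sentry-trace headers fed to the getter above follow the shape
# "<32-hex trace_id>-<16-hex span_id>-<sampled flag>". A minimal sketch of
# splitting such a header (the SDK's own parsing lives in
# sentry_sdk.tracing_utils; this helper is illustrative only):
def _split_sentry_trace(header):
    trace_id, parent_span_id, sampled = header.split("-")
    return trace_id, parent_span_id, sampled == "1"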


@pytest.mark.forked
def test_inject_empty_otel_span_map():
    """
    Empty otel_span_map.
    So there is no sentry_span to be found in inject()
    and the function is returned early and no setters are called.
    """
    carrier = None
    context = get_current()
    setter = MagicMock()
    setter.set = MagicMock()

    span_context = SpanContext(
        trace_id=int("1234567890abcdef1234567890abcdef", 16),
        span_id=int("1234567890abcdef", 16),
        trace_flags=TraceFlags(TraceFlags.SAMPLED),
        is_remote=True,
    )
    span = MagicMock()
    span.get_span_context.return_value = span_context

    with mock.patch(
        "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span",
        return_value=span,
    ):
        full_context = set_span_in_context(span, context)
        SentryPropagator().inject(carrier, full_context, setter)

        setter.set.assert_not_called()


@pytest.mark.forked
def test_inject_sentry_span_no_baggage():
    """
    Inject a sentry span with no baggage.
    """
    carrier = None
    context = get_current()
    setter = MagicMock()
    setter.set = MagicMock()

    trace_id = "1234567890abcdef1234567890abcdef"
    span_id = "1234567890abcdef"

    span_context = SpanContext(
        trace_id=int(trace_id, 16),
        span_id=int(span_id, 16),
        trace_flags=TraceFlags(TraceFlags.SAMPLED),
        is_remote=True,
    )
    span = MagicMock()
    span.get_span_context.return_value = span_context

    sentry_span = MagicMock()
    sentry_span.to_traceparent = mock.Mock(
        return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1"
    )
    sentry_span.containing_transaction.get_baggage = mock.Mock(return_value=None)

    span_processor = SentrySpanProcessor()
    span_processor.otel_span_map[span_id] = sentry_span

    with mock.patch(
        "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span",
        return_value=span,
    ):
        full_context = set_span_in_context(span, context)
        SentryPropagator().inject(carrier, full_context, setter)

        setter.set.assert_called_once_with(
            carrier,
            "sentry-trace",
            "1234567890abcdef1234567890abcdef-1234567890abcdef-1",
        )


def test_inject_sentry_span_empty_baggage():
    """
    Inject a sentry span whose transaction carries empty baggage; only the
    sentry-trace header should be set.
    """
    carrier = None
    context = get_current()
    setter = MagicMock()
    setter.set = MagicMock()

    trace_id = "1234567890abcdef1234567890abcdef"
    span_id = "1234567890abcdef"

    span_context = SpanContext(
        trace_id=int(trace_id, 16),
        span_id=int(span_id, 16),
        trace_flags=TraceFlags(TraceFlags.SAMPLED),
        is_remote=True,
    )
    span = MagicMock()
    span.get_span_context.return_value = span_context

    sentry_span = MagicMock()
    sentry_span.to_traceparent = mock.Mock(
        return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1"
    )
    sentry_span.containing_transaction.get_baggage = mock.Mock(return_value=Baggage({}))

    span_processor = SentrySpanProcessor()
    span_processor.otel_span_map[span_id] = sentry_span

    with mock.patch(
        "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span",
        return_value=span,
    ):
        full_context = set_span_in_context(span, context)
        SentryPropagator().inject(carrier, full_context, setter)

        setter.set.assert_called_once_with(
            carrier,
            "sentry-trace",
            "1234567890abcdef1234567890abcdef-1234567890abcdef-1",
        )


def test_inject_sentry_span_baggage():
    """
    Inject a sentry span with baggage.
    """
    carrier = None
    context = get_current()
    setter = MagicMock()
    setter.set = MagicMock()

    trace_id = "1234567890abcdef1234567890abcdef"
    span_id = "1234567890abcdef"

    span_context = SpanContext(
        trace_id=int(trace_id, 16),
        span_id=int(span_id, 16),
        trace_flags=TraceFlags(TraceFlags.SAMPLED),
        is_remote=True,
    )
    span = MagicMock()
    span.get_span_context.return_value = span_context

    sentry_span = MagicMock()
    sentry_span.to_traceparent = mock.Mock(
        return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1"
    )
    sentry_items = {
        "sentry-trace_id": "771a43a4192642f0b136d5159a501700",
        "sentry-public_key": "49d0f7386ad645858ae85020e393bef3",
        "sentry-sample_rate": 0.01337,
        "sentry-user_id": "Amélie",
    }
    baggage = Baggage(sentry_items=sentry_items)
    sentry_span.containing_transaction.get_baggage = MagicMock(return_value=baggage)

    span_processor = SentrySpanProcessor()
    span_processor.otel_span_map[span_id] = sentry_span

    with mock.patch(
        "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span",
        return_value=span,
    ):
        full_context = set_span_in_context(span, context)
        SentryPropagator().inject(carrier, full_context, setter)

        setter.set.assert_any_call(
            carrier,
            "sentry-trace",
            "1234567890abcdef1234567890abcdef-1234567890abcdef-1",
        )

        setter.set.assert_any_call(
            carrier,
            "baggage",
            baggage.serialize(),
        )
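

# Summary of the inject cases above: when the current OTel span id maps to a
# Sentry span, a "sentry-trace" header of the form
# "<trace_id>-<span_id>-<sampled>" is always written, while a "baggage"
# header is only added when the containing transaction carries non-empty
# baggage.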
sentry-python-2.18.0/tests/integrations/opentelemetry/test_span_processor.py000066400000000000000000000513651471214654000277000ustar00rootroot00000000000000import time
from datetime import datetime, timezone
from unittest import mock
from unittest.mock import MagicMock

import pytest
from opentelemetry.trace import SpanKind, SpanContext, Status, StatusCode

import sentry_sdk
from sentry_sdk.integrations.opentelemetry.span_processor import (
    SentrySpanProcessor,
    link_trace_context_to_error_event,
)
from sentry_sdk.tracing import Span, Transaction
from sentry_sdk.tracing_utils import extract_sentrytrace_data


def test_is_sentry_span():
    otel_span = MagicMock()

    span_processor = SentrySpanProcessor()
    assert not span_processor._is_sentry_span(otel_span)

    client = MagicMock()
    client.options = {"instrumenter": "otel"}
    client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"
    sentry_sdk.get_global_scope().set_client(client)

    assert not span_processor._is_sentry_span(otel_span)

    otel_span.attributes = {
        "http.url": "https://example.com",
    }
    assert not span_processor._is_sentry_span(otel_span)

    otel_span.attributes = {
        "http.url": "https://o123456.ingest.sentry.io/api/123/envelope",
    }
    assert span_processor._is_sentry_span(otel_span)


def test_get_otel_context():
    otel_span = MagicMock()
    otel_span.attributes = {"foo": "bar"}
    otel_span.resource = MagicMock()
    otel_span.resource.attributes = {"baz": "qux"}

    span_processor = SentrySpanProcessor()
    otel_context = span_processor._get_otel_context(otel_span)

    assert otel_context == {
        "attributes": {"foo": "bar"},
        "resource": {"baz": "qux"},
    }


def test_get_trace_data_with_span_and_trace():
    otel_span = MagicMock()
    span_context = SpanContext(
        trace_id=int("1234567890abcdef1234567890abcdef", 16),
        span_id=int("1234567890abcdef", 16),
        is_remote=True,
    )
    otel_span.get_span_context.return_value = span_context
    otel_span.parent = None

    parent_context = {}

    span_processor = SentrySpanProcessor()
    sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
    assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
    assert sentry_trace_data["span_id"] == "1234567890abcdef"
    assert sentry_trace_data["parent_span_id"] is None
    assert sentry_trace_data["parent_sampled"] is None
    assert sentry_trace_data["baggage"] is None


def test_get_trace_data_with_span_and_trace_and_parent():
    otel_span = MagicMock()
    span_context = SpanContext(
        trace_id=int("1234567890abcdef1234567890abcdef", 16),
        span_id=int("1234567890abcdef", 16),
        is_remote=True,
    )
    otel_span.get_span_context.return_value = span_context
    otel_span.parent = MagicMock()
    otel_span.parent.span_id = int("abcdef1234567890", 16)

    parent_context = {}

    span_processor = SentrySpanProcessor()
    sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
    assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
    assert sentry_trace_data["span_id"] == "1234567890abcdef"
    assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
    assert sentry_trace_data["parent_sampled"] is None
    assert sentry_trace_data["baggage"] is None


def test_get_trace_data_with_sentry_trace():
    otel_span = MagicMock()
    span_context = SpanContext(
        trace_id=int("1234567890abcdef1234567890abcdef", 16),
        span_id=int("1234567890abcdef", 16),
        is_remote=True,
    )
    otel_span.get_span_context.return_value = span_context
    otel_span.parent = MagicMock()
    otel_span.parent.span_id = int("abcdef1234567890", 16)

    parent_context = {}

    with mock.patch(
        "sentry_sdk.integrations.opentelemetry.span_processor.get_value",
        side_effect=[
            extract_sentrytrace_data(
                "1234567890abcdef1234567890abcdef-1234567890abcdef-1"
            ),
            None,
        ],
    ):
        span_processor = SentrySpanProcessor()
        sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
        assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
        assert sentry_trace_data["span_id"] == "1234567890abcdef"
        assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
        assert sentry_trace_data["parent_sampled"] is True
        assert sentry_trace_data["baggage"] is None

    with mock.patch(
        "sentry_sdk.integrations.opentelemetry.span_processor.get_value",
        side_effect=[
            extract_sentrytrace_data(
                "1234567890abcdef1234567890abcdef-1234567890abcdef-0"
            ),
            None,
        ],
    ):
        span_processor = SentrySpanProcessor()
        sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
        assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
        assert sentry_trace_data["span_id"] == "1234567890abcdef"
        assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
        assert sentry_trace_data["parent_sampled"] is False
        assert sentry_trace_data["baggage"] is None


def test_get_trace_data_with_sentry_trace_and_baggage():
    otel_span = MagicMock()
    span_context = SpanContext(
        trace_id=int("1234567890abcdef1234567890abcdef", 16),
        span_id=int("1234567890abcdef", 16),
        is_remote=True,
    )
    otel_span.get_span_context.return_value = span_context
    otel_span.parent = MagicMock()
    otel_span.parent.span_id = int("abcdef1234567890", 16)

    parent_context = {}

    baggage = (
        "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
        "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
        "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie"
    )

    with mock.patch(
        "sentry_sdk.integrations.opentelemetry.span_processor.get_value",
        side_effect=[
            extract_sentrytrace_data(
                "1234567890abcdef1234567890abcdef-1234567890abcdef-1"
            ),
            baggage,
        ],
    ):
        span_processor = SentrySpanProcessor()
        sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
        assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
        assert sentry_trace_data["span_id"] == "1234567890abcdef"
        assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
        assert sentry_trace_data["parent_sampled"]
        assert sentry_trace_data["baggage"] == baggage


def test_update_span_with_otel_data_http_method():
    sentry_span = Span()

    otel_span = MagicMock()
    otel_span.name = "Test OTel Span"
    otel_span.kind = SpanKind.CLIENT
    otel_span.attributes = {
        "http.method": "GET",
        "http.status_code": 429,
        "http.status_text": "xxx",
        "http.user_agent": "curl/7.64.1",
        "net.peer.name": "example.com",
        "http.target": "/",
    }

    span_processor = SentrySpanProcessor()
    span_processor._update_span_with_otel_data(sentry_span, otel_span)

    assert sentry_span.op == "http.client"
    assert sentry_span.description == "GET example.com /"
    assert sentry_span.status == "resource_exhausted"

    assert sentry_span._data["http.method"] == "GET"
    assert sentry_span._data["http.response.status_code"] == 429
    assert sentry_span._data["http.status_text"] == "xxx"
    assert sentry_span._data["http.user_agent"] == "curl/7.64.1"
    assert sentry_span._data["net.peer.name"] == "example.com"
    assert sentry_span._data["http.target"] == "/"


@pytest.mark.parametrize(
    "otel_status, expected_status",
    [
        pytest.param(Status(StatusCode.UNSET), None, id="unset"),
        pytest.param(Status(StatusCode.OK), "ok", id="ok"),
        pytest.param(Status(StatusCode.ERROR), "internal_error", id="error"),
    ],
)
def test_update_span_with_otel_status(otel_status, expected_status):
    sentry_span = Span()

    otel_span = MagicMock()
    otel_span.name = "Test OTel Span"
    otel_span.kind = SpanKind.INTERNAL
    otel_span.status = otel_status

    span_processor = SentrySpanProcessor()
    span_processor._update_span_with_otel_status(sentry_span, otel_span)

    assert sentry_span.get_trace_context().get("status") == expected_status


def test_update_span_with_otel_data_http_method2():
    sentry_span = Span()

    otel_span = MagicMock()
    otel_span.name = "Test OTel Span"
    otel_span.kind = SpanKind.SERVER
    otel_span.attributes = {
        "http.method": "GET",
        "http.status_code": 429,
        "http.status_text": "xxx",
        "http.user_agent": "curl/7.64.1",
        "http.url": "https://example.com/status/403?password=123&username=test@example.com&author=User123&auth=1234567890abcdef",
    }

    span_processor = SentrySpanProcessor()
    span_processor._update_span_with_otel_data(sentry_span, otel_span)

    assert sentry_span.op == "http.server"
    assert sentry_span.description == "GET https://example.com/status/403"
    assert sentry_span.status == "resource_exhausted"

    assert sentry_span._data["http.method"] == "GET"
    assert sentry_span._data["http.response.status_code"] == 429
    assert sentry_span._data["http.status_text"] == "xxx"
    assert sentry_span._data["http.user_agent"] == "curl/7.64.1"
    assert (
        sentry_span._data["http.url"]
        == "https://example.com/status/403?password=123&username=test@example.com&author=User123&auth=1234567890abcdef"
    )


def test_update_span_with_otel_data_db_query():
    sentry_span = Span()

    otel_span = MagicMock()
    otel_span.name = "Test OTel Span"
    otel_span.attributes = {
        "db.system": "postgresql",
        "db.statement": "SELECT * FROM table where pwd = '123456'",
    }

    span_processor = SentrySpanProcessor()
    span_processor._update_span_with_otel_data(sentry_span, otel_span)

    assert sentry_span.op == "db"
    assert sentry_span.description == "SELECT * FROM table where pwd = '123456'"

    assert sentry_span._data["db.system"] == "postgresql"
    assert (
        sentry_span._data["db.statement"] == "SELECT * FROM table where pwd = '123456'"
    )


def test_on_start_transaction():
    otel_span = MagicMock()
    otel_span.name = "Sample OTel Span"
    otel_span.start_time = time.time_ns()
    span_context = SpanContext(
        trace_id=int("1234567890abcdef1234567890abcdef", 16),
        span_id=int("1234567890abcdef", 16),
        is_remote=True,
    )
    otel_span.get_span_context.return_value = span_context
    otel_span.parent = MagicMock()
    otel_span.parent.span_id = int("abcdef1234567890", 16)

    parent_context = {}

    fake_start_transaction = MagicMock()

    fake_client = MagicMock()
    fake_client.options = {"instrumenter": "otel"}
    fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"
    sentry_sdk.get_global_scope().set_client(fake_client)

    with mock.patch(
        "sentry_sdk.integrations.opentelemetry.span_processor.start_transaction",
        fake_start_transaction,
    ):
        span_processor = SentrySpanProcessor()
        span_processor.on_start(otel_span, parent_context)

        fake_start_transaction.assert_called_once_with(
            name="Sample OTel Span",
            span_id="1234567890abcdef",
            parent_span_id="abcdef1234567890",
            trace_id="1234567890abcdef1234567890abcdef",
            baggage=None,
            start_timestamp=datetime.fromtimestamp(
                otel_span.start_time / 1e9, timezone.utc
            ),
            instrumenter="otel",
            origin="auto.otel",
        )

        assert len(span_processor.otel_span_map.keys()) == 1
        assert list(span_processor.otel_span_map.keys())[0] == "1234567890abcdef"


def test_on_start_child():
    otel_span = MagicMock()
    otel_span.name = "Sample OTel Span"
    otel_span.start_time = time.time_ns()
    span_context = SpanContext(
        trace_id=int("1234567890abcdef1234567890abcdef", 16),
        span_id=int("1234567890abcdef", 16),
        is_remote=True,
    )
    otel_span.get_span_context.return_value = span_context
    otel_span.parent = MagicMock()
    otel_span.parent.span_id = int("abcdef1234567890", 16)

    parent_context = {}

    fake_client = MagicMock()
    fake_client.options = {"instrumenter": "otel"}
    fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"
    sentry_sdk.get_global_scope().set_client(fake_client)

    fake_span = MagicMock()

    span_processor = SentrySpanProcessor()
    span_processor.otel_span_map["abcdef1234567890"] = fake_span
    span_processor.on_start(otel_span, parent_context)

    fake_span.start_child.assert_called_once_with(
        span_id="1234567890abcdef",
        name="Sample OTel Span",
        start_timestamp=datetime.fromtimestamp(
            otel_span.start_time / 1e9, timezone.utc
        ),
        instrumenter="otel",
        origin="auto.otel",
    )

    assert len(span_processor.otel_span_map.keys()) == 2
    assert "abcdef1234567890" in span_processor.otel_span_map.keys()
    assert "1234567890abcdef" in span_processor.otel_span_map.keys()


def test_on_end_no_sentry_span():
    """
    If on_end is called on a span that is not in the otel_span_map, it should be a no-op.
    """
    otel_span = MagicMock()
    otel_span.name = "Sample OTel Span"
    otel_span.end_time = time.time_ns()
    span_context = SpanContext(
        trace_id=int("1234567890abcdef1234567890abcdef", 16),
        span_id=int("1234567890abcdef", 16),
        is_remote=True,
    )
    otel_span.get_span_context.return_value = span_context

    span_processor = SentrySpanProcessor()
    span_processor.otel_span_map = {}
    span_processor._get_otel_context = MagicMock()
    span_processor._update_span_with_otel_data = MagicMock()

    span_processor.on_end(otel_span)

    span_processor._get_otel_context.assert_not_called()
    span_processor._update_span_with_otel_data.assert_not_called()


def test_on_end_sentry_transaction():
    """
    Test on_end for a sentry Transaction.
    """
    otel_span = MagicMock()
    otel_span.name = "Sample OTel Span"
    otel_span.end_time = time.time_ns()
    otel_span.status = Status(StatusCode.OK)
    span_context = SpanContext(
        trace_id=int("1234567890abcdef1234567890abcdef", 16),
        span_id=int("1234567890abcdef", 16),
        is_remote=True,
    )
    otel_span.get_span_context.return_value = span_context

    fake_client = MagicMock()
    fake_client.options = {"instrumenter": "otel"}
    sentry_sdk.get_global_scope().set_client(fake_client)

    fake_sentry_span = MagicMock(spec=Transaction)
    fake_sentry_span.set_context = MagicMock()
    fake_sentry_span.finish = MagicMock()

    span_processor = SentrySpanProcessor()
    span_processor._get_otel_context = MagicMock()
    span_processor._update_span_with_otel_data = MagicMock()
    span_processor.otel_span_map["1234567890abcdef"] = fake_sentry_span

    span_processor.on_end(otel_span)

    fake_sentry_span.set_context.assert_called_once()
    span_processor._update_span_with_otel_data.assert_not_called()
    fake_sentry_span.set_status.assert_called_once_with("ok")
    fake_sentry_span.finish.assert_called_once()


def test_on_end_sentry_span():
    """
    Test on_end for a sentry Span.
    """
    otel_span = MagicMock()
    otel_span.name = "Sample OTel Span"
    otel_span.end_time = time.time_ns()
    otel_span.status = Status(StatusCode.OK)
    span_context = SpanContext(
        trace_id=int("1234567890abcdef1234567890abcdef", 16),
        span_id=int("1234567890abcdef", 16),
        is_remote=True,
    )
    otel_span.get_span_context.return_value = span_context

    fake_client = MagicMock()
    fake_client.options = {"instrumenter": "otel"}
    sentry_sdk.get_global_scope().set_client(fake_client)

    fake_sentry_span = MagicMock(spec=Span)
    fake_sentry_span.set_context = MagicMock()
    fake_sentry_span.finish = MagicMock()

    span_processor = SentrySpanProcessor()
    span_processor._get_otel_context = MagicMock()
    span_processor._update_span_with_otel_data = MagicMock()
    span_processor.otel_span_map["1234567890abcdef"] = fake_sentry_span

    span_processor.on_end(otel_span)

    fake_sentry_span.set_context.assert_not_called()
    span_processor._update_span_with_otel_data.assert_called_once_with(
        fake_sentry_span, otel_span
    )
    fake_sentry_span.set_status.assert_called_once_with("ok")
    fake_sentry_span.finish.assert_called_once()
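

# The two on_end tests above make the asymmetry explicit: a Sentry Transaction
# gets the OTel context attached via set_context, while a plain Span is
# enriched via _update_span_with_otel_data; in both cases the OTel status is
# mapped and the Sentry span is finished exactly once.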


def test_link_trace_context_to_error_event():
    """
    Test that the trace context is added to the error event.
    """
    fake_client = MagicMock()
    fake_client.options = {"instrumenter": "otel"}
    sentry_sdk.get_global_scope().set_client(fake_client)

    span_id = "1234567890abcdef"
    trace_id = "1234567890abcdef1234567890abcdef"

    fake_trace_context = {
        "bla": "blub",
        "foo": "bar",
        "baz": 123,
    }

    sentry_span = MagicMock()
    sentry_span.get_trace_context = MagicMock(return_value=fake_trace_context)

    otel_span_map = {
        span_id: sentry_span,
    }

    span_context = SpanContext(
        trace_id=int(trace_id, 16),
        span_id=int(span_id, 16),
        is_remote=True,
    )
    otel_span = MagicMock()
    otel_span.get_span_context = MagicMock(return_value=span_context)

    fake_event = {"event_id": "1234567890abcdef1234567890abcdef"}

    with mock.patch(
        "sentry_sdk.integrations.opentelemetry.span_processor.get_current_span",
        return_value=otel_span,
    ):
        event = link_trace_context_to_error_event(fake_event, otel_span_map)

        assert event
        assert event == fake_event  # the event is changed in place inside the function
        assert "contexts" in event
        assert "trace" in event["contexts"]
        assert event["contexts"]["trace"] == fake_trace_context


def test_pruning_old_spans_on_start():
    otel_span = MagicMock()
    otel_span.name = "Sample OTel Span"
    otel_span.start_time = time.time_ns()
    span_context = SpanContext(
        trace_id=int("1234567890abcdef1234567890abcdef", 16),
        span_id=int("1234567890abcdef", 16),
        is_remote=True,
    )
    otel_span.get_span_context.return_value = span_context
    otel_span.parent = MagicMock()
    otel_span.parent.span_id = int("abcdef1234567890", 16)

    parent_context = {}
    fake_client = MagicMock()
    fake_client.options = {"instrumenter": "otel", "debug": False}
    fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"
    sentry_sdk.get_global_scope().set_client(fake_client)

    span_processor = SentrySpanProcessor()

    span_processor.otel_span_map = {
        "111111111abcdef": MagicMock(),  # should stay
        "2222222222abcdef": MagicMock(),  # should go
        "3333333333abcdef": MagicMock(),  # should go
    }
    current_time_minutes = int(time.time() / 60)
    span_processor.open_spans = {
        current_time_minutes - 3: {"111111111abcdef"},  # should stay
        current_time_minutes
        - 11: {"2222222222abcdef", "3333333333abcdef"},  # should go
    }

    span_processor.on_start(otel_span, parent_context)
    assert sorted(list(span_processor.otel_span_map.keys())) == [
        "111111111abcdef",
        "1234567890abcdef",
    ]
    assert sorted(list(span_processor.open_spans.values())) == [
        {"111111111abcdef"},
        {"1234567890abcdef"},
    ]


def test_pruning_old_spans_on_end():
    otel_span = MagicMock()
    otel_span.name = "Sample OTel Span"
    otel_span.start_time = time.time_ns()
    span_context = SpanContext(
        trace_id=int("1234567890abcdef1234567890abcdef", 16),
        span_id=int("1234567890abcdef", 16),
        is_remote=True,
    )
    otel_span.get_span_context.return_value = span_context
    otel_span.parent = MagicMock()
    otel_span.parent.span_id = int("abcdef1234567890", 16)

    fake_client = MagicMock()
    fake_client.options = {"instrumenter": "otel"}
    sentry_sdk.get_global_scope().set_client(fake_client)

    fake_sentry_span = MagicMock(spec=Span)
    fake_sentry_span.set_context = MagicMock()
    fake_sentry_span.finish = MagicMock()

    span_processor = SentrySpanProcessor()
    span_processor._get_otel_context = MagicMock()
    span_processor._update_span_with_otel_data = MagicMock()

    span_processor.otel_span_map = {
        "111111111abcdef": MagicMock(),  # should stay
        "2222222222abcdef": MagicMock(),  # should go
        "3333333333abcdef": MagicMock(),  # should go
        "1234567890abcdef": fake_sentry_span,  # should go (because it is closed)
    }
    current_time_minutes = int(time.time() / 60)
    span_processor.open_spans = {
        current_time_minutes: {"1234567890abcdef"},  # should go (because it is closed)
        current_time_minutes - 3: {"111111111abcdef"},  # should stay
        current_time_minutes
        - 11: {"2222222222abcdef", "3333333333abcdef"},  # should go
    }

    span_processor.on_end(otel_span)
    assert sorted(list(span_processor.otel_span_map.keys())) == ["111111111abcdef"]
    assert sorted(list(span_processor.open_spans.values())) == [{"111111111abcdef"}]
sentry-python-2.18.0/tests/integrations/pure_eval/000077500000000000000000000000001471214654000223035ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/pure_eval/__init__.py000066400000000000000000000000601471214654000244100ustar00rootroot00000000000000import pytest

pytest.importorskip("pure_eval")
sentry-python-2.18.0/tests/integrations/pure_eval/test_pure_eval.py000066400000000000000000000044121471214654000256770ustar00rootroot00000000000000from types import SimpleNamespace

import pytest

from sentry_sdk import capture_exception, serializer
from sentry_sdk.integrations.pure_eval import PureEvalIntegration


@pytest.mark.parametrize("integrations", [[], [PureEvalIntegration()]])
def test_include_local_variables_enabled(sentry_init, capture_events, integrations):
    sentry_init(include_local_variables=True, integrations=integrations)
    events = capture_events()

    def foo():
        namespace = SimpleNamespace()
        q = 1
        w = 2
        e = 3
        r = 4
        t = 5
        y = 6
        u = 7
        i = 8
        o = 9
        p = 10
        a = 11
        s = 12
        str((q, w, e, r, t, y, u, i, o, p, a, s))  # use variables for linter
        namespace.d = {1: 2}
        print(namespace.d[1] / 0)

        # Appearances of variables after the main statement don't affect order
        print(q)
        print(s)
        print(events)

    try:
        foo()
    except Exception:
        capture_exception()

    (event,) = events

    assert all(
        frame["vars"]
        for frame in event["exception"]["values"][0]["stacktrace"]["frames"]
    )

    frame_vars = event["exception"]["values"][0]["stacktrace"]["frames"][-1]["vars"]

    if integrations:
        # Values closest to the exception line appear first
        # Test this order if possible given the Python version and dict order
        expected_keys = [
            "namespace",
            "namespace.d",
            "namespace.d[1]",
            "s",
            "a",
            "p",
            "o",
            "i",
            "u",
            "y",
        ]
        assert list(frame_vars.keys()) == expected_keys
        assert frame_vars["namespace.d"] == {"1": "2"}
        assert frame_vars["namespace.d[1]"] == "2"
    else:
        # Without pure_eval, the variables are unpredictable.
        # In later versions, those at the top appear first and are thus included
        assert frame_vars.keys() <= {
            "namespace",
            "q",
            "w",
            "e",
            "r",
            "t",
            "y",
            "u",
            "i",
            "o",
            "p",
            "a",
            "s",
            "events",
        }
        assert len(frame_vars) == serializer.MAX_DATABAG_BREADTH
sentry-python-2.18.0/tests/integrations/pymongo/000077500000000000000000000000001471214654000220115ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/pymongo/__init__.py000066400000000000000000000000561471214654000241230ustar00rootroot00000000000000import pytest

pytest.importorskip("pymongo")
sentry-python-2.18.0/tests/integrations/pymongo/test_pymongo.py000066400000000000000000000365231471214654000251230ustar00rootroot00000000000000from sentry_sdk import capture_message, start_transaction
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.pymongo import PyMongoIntegration, _strip_pii

from mockupdb import MockupDB, OpQuery
from pymongo import MongoClient
import pytest


@pytest.fixture(scope="session")
def mongo_server():
    server = MockupDB(verbose=True)
    server.autoresponds("ismaster", maxWireVersion=6)
    server.run()
    server.autoresponds(
        {"find": "test_collection"}, cursor={"id": 123, "firstBatch": []}
    )
    # Find query changed somewhere between PyMongo 3.1 and 3.12.
    # This line is to respond to "find" queries sent by old PyMongo the same way it's done above.
    server.autoresponds(OpQuery({"foobar": 1}), cursor={"id": 123, "firstBatch": []})
    server.autoresponds({"insert": "test_collection"}, ok=1)
    server.autoresponds({"insert": "erroneous"}, ok=0, errmsg="test error")
    yield server
    server.stop()


@pytest.mark.parametrize("with_pii", [False, True])
def test_transactions(sentry_init, capture_events, mongo_server, with_pii):
    sentry_init(
        integrations=[PyMongoIntegration()],
        traces_sample_rate=1.0,
        send_default_pii=with_pii,
    )
    events = capture_events()

    connection = MongoClient(mongo_server.uri)

    with start_transaction():
        list(
            connection["test_db"]["test_collection"].find({"foobar": 1})
        )  # force query execution
        connection["test_db"]["test_collection"].insert_one({"foo": 2})
        try:
            connection["test_db"]["erroneous"].insert_many([{"bar": 3}, {"baz": 4}])
            pytest.fail("Request should raise")
        except Exception:
            pass

    (event,) = events
    (find, insert_success, insert_fail) = event["spans"]

    common_tags = {
        "db.name": "test_db",
        "db.system": "mongodb",
        "net.peer.name": mongo_server.host,
        "net.peer.port": str(mongo_server.port),
    }
    for span in find, insert_success, insert_fail:
        assert span["data"][SPANDATA.DB_SYSTEM] == "mongodb"
        assert span["data"][SPANDATA.DB_NAME] == "test_db"
        assert span["data"][SPANDATA.SERVER_ADDRESS] == "localhost"
        assert span["data"][SPANDATA.SERVER_PORT] == mongo_server.port
        for field, value in common_tags.items():
            assert span["tags"][field] == value
            assert span["data"][field] == value

    assert find["op"] == "db"
    assert insert_success["op"] == "db"
    assert insert_fail["op"] == "db"

    assert find["data"]["db.operation"] == "find"
    assert find["tags"]["db.operation"] == "find"
    assert insert_success["data"]["db.operation"] == "insert"
    assert insert_success["tags"]["db.operation"] == "insert"
    assert insert_fail["data"]["db.operation"] == "insert"
    assert insert_fail["tags"]["db.operation"] == "insert"

    assert find["description"].startswith('{"find')
    assert insert_success["description"].startswith('{"insert')
    assert insert_fail["description"].startswith('{"insert')

    assert find["data"][SPANDATA.DB_MONGODB_COLLECTION] == "test_collection"
    assert find["tags"][SPANDATA.DB_MONGODB_COLLECTION] == "test_collection"
    assert insert_success["data"][SPANDATA.DB_MONGODB_COLLECTION] == "test_collection"
    assert insert_success["tags"][SPANDATA.DB_MONGODB_COLLECTION] == "test_collection"
    assert insert_fail["data"][SPANDATA.DB_MONGODB_COLLECTION] == "erroneous"
    assert insert_fail["tags"][SPANDATA.DB_MONGODB_COLLECTION] == "erroneous"
    if with_pii:
        assert "1" in find["description"]
        assert "2" in insert_success["description"]
        assert "3" in insert_fail["description"] and "4" in insert_fail["description"]
    else:
        # All values in filter replaced by "%s"
        assert "1" not in find["description"]
        # All keys below top level replaced by "%s"
        assert "2" not in insert_success["description"]
        assert (
            "3" not in insert_fail["description"]
            and "4" not in insert_fail["description"]
        )

    assert find["tags"]["status"] == "ok"
    assert insert_success["tags"]["status"] == "ok"
    assert insert_fail["tags"]["status"] == "internal_error"


@pytest.mark.parametrize("with_pii", [False, True])
def test_breadcrumbs(sentry_init, capture_events, mongo_server, with_pii):
    sentry_init(
        integrations=[PyMongoIntegration()],
        traces_sample_rate=1.0,
        send_default_pii=with_pii,
    )
    events = capture_events()

    connection = MongoClient(mongo_server.uri)

    list(
        connection["test_db"]["test_collection"].find({"foobar": 1})
    )  # force query execution
    capture_message("hi")

    (event,) = events
    (crumb,) = event["breadcrumbs"]["values"]

    assert crumb["category"] == "query"
    assert crumb["message"].startswith('{"find')
    if with_pii:
        assert "1" in crumb["message"]
    else:
        assert "1" not in crumb["message"]
    assert crumb["type"] == "db"
    assert crumb["data"] == {
        "db.name": "test_db",
        "db.system": "mongodb",
        "db.operation": "find",
        "net.peer.name": mongo_server.host,
        "net.peer.port": str(mongo_server.port),
        "db.mongodb.collection": "test_collection",
    }


@pytest.mark.parametrize(
    "testcase",
    [
        {
            "command": {
                "insert": "my_collection",
                "ordered": True,
                "documents": [
                    {
                        "username": "anton2",
                        "email": "anton@somewhere.io",
                        "password": "c4e86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf0175",
                        "_id": "635bc7403cb4f8a736f61cf2",
                    }
                ],
            },
            "command_stripped": {
                "insert": "my_collection",
                "ordered": True,
                "documents": [
                    {"username": "%s", "email": "%s", "password": "%s", "_id": "%s"}
                ],
            },
        },
        {
            "command": {
                "insert": "my_collection",
                "ordered": True,
                "documents": [
                    {
                        "username": "indiana4",
                        "email": "indy@jones.org",
                        "password": "63e86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf016b",
                        "_id": "635bc7403cb4f8a736f61cf3",
                    }
                ],
            },
            "command_stripped": {
                "insert": "my_collection",
                "ordered": True,
                "documents": [
                    {"username": "%s", "email": "%s", "password": "%s", "_id": "%s"}
                ],
            },
        },
        {
            "command": {
                "find": "my_collection",
                "filter": {},
                "limit": 1,
                "singleBatch": True,
            },
            "command_stripped": {
                "find": "my_collection",
                "filter": {},
                "limit": 1,
                "singleBatch": True,
            },
        },
        {
            "command": {
                "find": "my_collection",
                "filter": {"username": "notthere"},
                "limit": 1,
                "singleBatch": True,
            },
            "command_stripped": {
                "find": "my_collection",
                "filter": {"username": "%s"},
                "limit": 1,
                "singleBatch": True,
            },
        },
        {
            "command": {
                "insert": "my_collection",
                "ordered": True,
                "documents": [
                    {
                        "username": "userx1",
                        "email": "x@somewhere.io",
                        "password": "ccc86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf0175",
                        "_id": "635bc7403cb4f8a736f61cf4",
                    },
                    {
                        "username": "userx2",
                        "email": "x@somewhere.io",
                        "password": "xxx86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf0175",
                        "_id": "635bc7403cb4f8a736f61cf5",
                    },
                ],
            },
            "command_stripped": {
                "insert": "my_collection",
                "ordered": True,
                "documents": [
                    {"username": "%s", "email": "%s", "password": "%s", "_id": "%s"},
                    {"username": "%s", "email": "%s", "password": "%s", "_id": "%s"},
                ],
            },
        },
        {
            "command": {
                "find": "my_collection",
                "filter": {"email": "ada@lovelace.com"},
            },
            "command_stripped": {"find": "my_collection", "filter": {"email": "%s"}},
        },
        {
            "command": {
                "aggregate": "my_collection",
                "pipeline": [{"$match": {}}, {"$group": {"_id": 1, "n": {"$sum": 1}}}],
                "cursor": {},
            },
            "command_stripped": {
                "aggregate": "my_collection",
                "pipeline": [{"$match": {}}, {"$group": {"_id": 1, "n": {"$sum": 1}}}],
                "cursor": "%s",
            },
        },
        {
            "command": {
                "aggregate": "my_collection",
                "pipeline": [
                    {"$match": {"email": "x@somewhere.io"}},
                    {"$group": {"_id": 1, "n": {"$sum": 1}}},
                ],
                "cursor": {},
            },
            "command_stripped": {
                "aggregate": "my_collection",
                "pipeline": [
                    {"$match": {"email": "%s"}},
                    {"$group": {"_id": 1, "n": {"$sum": 1}}},
                ],
                "cursor": "%s",
            },
        },
        {
            "command": {
                "createIndexes": "my_collection",
                "indexes": [{"name": "username_1", "key": [("username", 1)]}],
            },
            "command_stripped": {
                "createIndexes": "my_collection",
                "indexes": [{"name": "username_1", "key": [("username", 1)]}],
            },
        },
        {
            "command": {
                "update": "my_collection",
                "ordered": True,
                "updates": [
                    ("q", {"email": "anton@somewhere.io"}),
                    (
                        "u",
                        {
                            "email": "anton2@somwehre.io",
                            "extra_field": "extra_content",
                            "new": "bla",
                        },
                    ),
                    ("multi", False),
                    ("upsert", False),
                ],
            },
            "command_stripped": {
                "update": "my_collection",
                "ordered": True,
                "updates": "%s",
            },
        },
        {
            "command": {
                "update": "my_collection",
                "ordered": True,
                "updates": [
                    ("q", {"email": "anton2@somwehre.io"}),
                    ("u", {"$rename": {"new": "new_field"}}),
                    ("multi", False),
                    ("upsert", False),
                ],
            },
            "command_stripped": {
                "update": "my_collection",
                "ordered": True,
                "updates": "%s",
            },
        },
        {
            "command": {
                "update": "my_collection",
                "ordered": True,
                "updates": [
                    ("q", {"email": "x@somewhere.io"}),
                    ("u", {"$rename": {"password": "pwd"}}),
                    ("multi", True),
                    ("upsert", False),
                ],
            },
            "command_stripped": {
                "update": "my_collection",
                "ordered": True,
                "updates": "%s",
            },
        },
        {
            "command": {
                "delete": "my_collection",
                "ordered": True,
                "deletes": [("q", {"username": "userx2"}), ("limit", 1)],
            },
            "command_stripped": {
                "delete": "my_collection",
                "ordered": True,
                "deletes": "%s",
            },
        },
        {
            "command": {
                "delete": "my_collection",
                "ordered": True,
                "deletes": [("q", {"email": "xplus@somewhere.io"}), ("limit", 0)],
            },
            "command_stripped": {
                "delete": "my_collection",
                "ordered": True,
                "deletes": "%s",
            },
        },
        {
            "command": {
                "findAndModify": "my_collection",
                "query": {"email": "ada@lovelace.com"},
                "new": False,
                "remove": True,
            },
            "command_stripped": {
                "findAndModify": "my_collection",
                "query": {"email": "%s"},
                "new": "%s",
                "remove": "%s",
            },
        },
        {
            "command": {
                "findAndModify": "my_collection",
                "query": {"email": "anton2@somewhere.io"},
                "new": False,
                "update": {"email": "anton3@somwehre.io", "extra_field": "xxx"},
                "upsert": False,
            },
            "command_stripped": {
                "findAndModify": "my_collection",
                "query": {"email": "%s"},
                "new": "%s",
                "update": {"email": "%s", "extra_field": "%s"},
                "upsert": "%s",
            },
        },
        {
            "command": {
                "findAndModify": "my_collection",
                "query": {"email": "anton3@somewhere.io"},
                "new": False,
                "update": {"$rename": {"extra_field": "extra_field2"}},
                "upsert": False,
            },
            "command_stripped": {
                "findAndModify": "my_collection",
                "query": {"email": "%s"},
                "new": "%s",
                "update": {"$rename": "%s"},
                "upsert": "%s",
            },
        },
        {
            "command": {
                "renameCollection": "test.my_collection",
                "to": "test.new_collection",
            },
            "command_stripped": {
                "renameCollection": "test.my_collection",
                "to": "test.new_collection",
            },
        },
        {
            "command": {"drop": "new_collection"},
            "command_stripped": {"drop": "new_collection"},
        },
    ],
)
def test_strip_pii(testcase):
    assert _strip_pii(testcase["command"]) == testcase["command_stripped"]


def test_span_origin(sentry_init, capture_events, mongo_server):
    sentry_init(
        integrations=[PyMongoIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    connection = MongoClient(mongo_server.uri)

    with start_transaction():
        list(
            connection["test_db"]["test_collection"].find({"foobar": 1})
        )  # force query execution

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"
    assert event["spans"][0]["origin"] == "auto.db.pymongo"
sentry-python-2.18.0/tests/integrations/pyramid/000077500000000000000000000000001471214654000217665ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/pyramid/__init__.py000066400000000000000000000000561471214654000241000ustar00rootroot00000000000000import pytest

pytest.importorskip("pyramid")
sentry-python-2.18.0/tests/integrations/pyramid/test_pyramid.py000066400000000000000000000276661471214654000250650ustar00rootroot00000000000000import json
import logging
from io import BytesIO

import pyramid.testing
import pytest
from pyramid.authorization import ACLAuthorizationPolicy
from pyramid.response import Response
from werkzeug.test import Client

from sentry_sdk import capture_message, add_breadcrumb
from sentry_sdk.integrations.pyramid import PyramidIntegration
from sentry_sdk.serializer import MAX_DATABAG_BREADTH
from tests.conftest import unpack_werkzeug_response


try:
    from importlib.metadata import version

    PYRAMID_VERSION = tuple(map(int, version("pyramid").split(".")))

except ImportError:
    # < py3.8
    import pkg_resources

    PYRAMID_VERSION = tuple(
        map(int, pkg_resources.get_distribution("pyramid").version.split("."))
    )


def hi(request):
    capture_message("hi")
    return Response("hi")


def hi_with_id(request):
    capture_message("hi with id")
    return Response("hi with id")


@pytest.fixture
def pyramid_config():
    config = pyramid.testing.setUp()
    try:
        config.add_route("hi", "/message")
        config.add_view(hi, route_name="hi")
        config.add_route("hi_with_id", "/message/{message_id}")
        config.add_view(hi_with_id, route_name="hi_with_id")
        yield config
    finally:
        pyramid.testing.tearDown()


@pytest.fixture
def route(pyramid_config):
    def inner(url):
        def wrapper(f):
            pyramid_config.add_route(f.__name__, url)
            pyramid_config.add_view(f, route_name=f.__name__)
            return f

        return wrapper

    return inner


@pytest.fixture
def get_client(pyramid_config):
    def inner():
        return Client(pyramid_config.make_wsgi_app())

    return inner


def test_view_exceptions(
    get_client, route, sentry_init, capture_events, capture_exceptions
):
    sentry_init(integrations=[PyramidIntegration()])
    events = capture_events()
    exceptions = capture_exceptions()

    add_breadcrumb({"message": "hi"})

    @route("/errors")
    def errors(request):
        add_breadcrumb({"message": "hi2"})
        1 / 0

    client = get_client()
    with pytest.raises(ZeroDivisionError):
        client.get("/errors")

    (error,) = exceptions
    assert isinstance(error, ZeroDivisionError)

    (event,) = events
    (breadcrumb,) = event["breadcrumbs"]["values"]
    assert breadcrumb["message"] == "hi2"
    # Checking only the last value in the exceptions list,
    # because Pyramid >= 1.9 returns a chained exception and before just a single exception
    assert event["exception"]["values"][-1]["mechanism"]["type"] == "pyramid"
    assert event["exception"]["values"][-1]["type"] == "ZeroDivisionError"


def test_has_context(route, get_client, sentry_init, capture_events):
    sentry_init(integrations=[PyramidIntegration()])
    events = capture_events()

    @route("/context_message/{msg}")
    def hi2(request):
        capture_message(request.matchdict["msg"])
        return Response("hi")

    client = get_client()
    client.get("/context_message/yoo")

    (event,) = events
    assert event["message"] == "yoo"
    assert event["request"] == {
        "env": {"SERVER_NAME": "localhost", "SERVER_PORT": "80"},
        "headers": {"Host": "localhost"},
        "method": "GET",
        "query_string": "",
        "url": "http://localhost/context_message/yoo",
    }
    assert event["transaction"] == "hi2"


@pytest.mark.parametrize(
    "url,transaction_style,expected_transaction,expected_source",
    [
        ("/message", "route_name", "hi", "component"),
        ("/message", "route_pattern", "/message", "route"),
        ("/message/123456", "route_name", "hi_with_id", "component"),
        ("/message/123456", "route_pattern", "/message/{message_id}", "route"),
    ],
)
def test_transaction_style(
    sentry_init,
    get_client,
    capture_events,
    url,
    transaction_style,
    expected_transaction,
    expected_source,
):
    sentry_init(integrations=[PyramidIntegration(transaction_style=transaction_style)])

    events = capture_events()
    client = get_client()
    client.get(url)

    (event,) = events
    assert event["transaction"] == expected_transaction
    assert event["transaction_info"] == {"source": expected_source}


def test_large_json_request(sentry_init, capture_events, route, get_client):
    sentry_init(integrations=[PyramidIntegration()])

    data = {"foo": {"bar": "a" * 2000}}

    @route("/")
    def index(request):
        assert request.json == data
        assert request.text == json.dumps(data)
        assert not request.POST
        capture_message("hi")
        return Response("ok")

    events = capture_events()

    client = get_client()
    client.post("/", content_type="application/json", data=json.dumps(data))

    (event,) = events
    assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
    }
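    # 1021 chars of payload + the 3-char "..." marker = the 1024-char databag
    # limit; the `_meta` entry above records the original length (2000).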
    assert len(event["request"]["data"]["foo"]["bar"]) == 1024


@pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
def test_pyramid_empty_json_request(sentry_init, capture_events, route, get_client, data):
    sentry_init(integrations=[PyramidIntegration()])

    @route("/")
    def index(request):
        assert request.json == data
        assert request.text == json.dumps(data)
        assert not request.POST
        capture_message("hi")
        return Response("ok")

    events = capture_events()

    client = get_client()
    response = client.post("/", content_type="application/json", data=json.dumps(data))
    assert unpack_werkzeug_response(response)[1] == "200 OK"

    (event,) = events
    assert event["request"]["data"] == data


def test_json_not_truncated_if_max_request_body_size_is_always(
    sentry_init, capture_events, route, get_client
):
    sentry_init(integrations=[PyramidIntegration()], max_request_body_size="always")

    data = {
        "key{}".format(i): "value{}".format(i) for i in range(MAX_DATABAG_BREADTH + 10)
    }

    @route("/")
    def index(request):
        assert request.json == data
        assert request.text == json.dumps(data)
        capture_message("hi")
        return Response("ok")

    events = capture_events()

    client = get_client()
    client.post("/", content_type="application/json", data=json.dumps(data))

    (event,) = events
    assert event["request"]["data"] == data


def test_files_and_form(sentry_init, capture_events, route, get_client):
    sentry_init(integrations=[PyramidIntegration()], max_request_body_size="always")

    data = {"foo": "a" * 2000, "file": (BytesIO(b"hello"), "hello.txt")}

    @route("/")
    def index(request):
        capture_message("hi")
        return Response("ok")

    events = capture_events()

    client = get_client()
    client.post("/", data=data)

    (event,) = events
    assert event["_meta"]["request"]["data"]["foo"] == {
        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
    }
    assert len(event["request"]["data"]["foo"]) == 1024

    assert event["_meta"]["request"]["data"]["file"] == {"": {"rem": [["!raw", "x"]]}}
    assert not event["request"]["data"]["file"]


def test_bad_request_not_captured(
    sentry_init, pyramid_config, capture_events, route, get_client
):
    import pyramid.httpexceptions as exc

    sentry_init(integrations=[PyramidIntegration()])
    events = capture_events()

    @route("/")
    def index(request):
        raise exc.HTTPBadRequest()

    def errorhandler(exc, request):
        return Response("bad request")

    pyramid_config.add_view(errorhandler, context=exc.HTTPBadRequest)

    client = get_client()
    client.get("/")

    assert not events


def test_errorhandler_ok(
    sentry_init, pyramid_config, capture_exceptions, route, get_client
):
    sentry_init(integrations=[PyramidIntegration()])
    errors = capture_exceptions()

    @route("/")
    def index(request):
        raise Exception()

    def errorhandler(exc, request):
        return Response("bad request")

    pyramid_config.add_view(errorhandler, context=Exception)

    client = get_client()
    client.get("/")

    assert not errors


@pytest.mark.skipif(
    PYRAMID_VERSION < (1, 9),
    reason="We don't have the right hooks in older Pyramid versions",
)
def test_errorhandler_500(
    sentry_init, pyramid_config, capture_exceptions, route, get_client
):
    sentry_init(integrations=[PyramidIntegration()])
    errors = capture_exceptions()

    @route("/")
    def index(request):
        1 / 0

    def errorhandler(exc, request):
        return Response("bad request", status=500)

    pyramid_config.add_view(errorhandler, context=Exception)

    client = get_client()
    app_iter, status, headers = unpack_werkzeug_response(client.get("/"))
    assert app_iter == b"bad request"
    assert status.lower() == "500 internal server error"

    (error,) = errors

    assert isinstance(error, ZeroDivisionError)


def test_error_in_errorhandler(
    sentry_init, pyramid_config, capture_events, route, get_client
):
    sentry_init(integrations=[PyramidIntegration()])

    @route("/")
    def index(request):
        raise ValueError()

    def error_handler(err, request):
        1 / 0

    pyramid_config.add_view(error_handler, context=ValueError)

    events = capture_events()

    client = get_client()

    with pytest.raises(ZeroDivisionError):
        client.get("/")

    (event,) = events

    exception = event["exception"]["values"][-1]
    assert exception["type"] == "ZeroDivisionError"


def test_error_in_authenticated_userid(
    sentry_init, pyramid_config, capture_events, route, get_client
):
    from sentry_sdk.integrations.logging import LoggingIntegration

    sentry_init(
        send_default_pii=True,
        integrations=[
            PyramidIntegration(),
            LoggingIntegration(event_level=logging.ERROR),
        ],
    )
    logger = logging.getLogger("test_pyramid")

    class AuthenticationPolicy:
        def authenticated_userid(self, request):
            logger.warning("failed to identify user")

    pyramid_config.set_authorization_policy(ACLAuthorizationPolicy())
    pyramid_config.set_authentication_policy(AuthenticationPolicy())

    events = capture_events()

    client = get_client()
    client.get("/message")

    assert len(events) == 1

    # In `authenticated_userid` there used to be a call to `logging.error`. That call happened inside the
    # event processor of the Pyramid integration, and the logging integration would capture it and send it to Sentry.
    # This is not possible anymore, because capturing that error in the logging integration would again run all the
    # event processors (from the global, isolation and current scope) and thus would again run the same Pyramid
    # event processor that raised the error in the first place, leading to an infinite loop.
    # This test is now deactivated and always passes, but it is kept here to document the problem.
    # This change in behavior is also mentioned in the migration documentation for Python SDK 2.0.

    # assert "message" not in events[0].keys()


def tween_factory(handler, registry):
    def tween(request):
        try:
            return handler(request)
        except Exception:
            mroute = request.matched_route
            if mroute and mroute.name in ("index",):
                return Response("bad request", status_code=400)
            # re-raise for routes this tween does not handle instead of
            # falling through to an unbound `response`
            raise

    return tween
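
# `tween_factory` must be defined at module level: Pyramid resolves the tween
# from the dotted name string passed to `config.add_tween` (see test_tween_ok
# below).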


def test_tween_ok(sentry_init, pyramid_config, capture_exceptions, route, get_client):
    sentry_init(integrations=[PyramidIntegration()])
    errors = capture_exceptions()

    @route("/")
    def index(request):
        raise Exception()

    pyramid_config.add_tween(
        "tests.integrations.pyramid.test_pyramid.tween_factory",
        under=pyramid.tweens.INGRESS,
    )

    client = get_client()
    client.get("/")

    assert not errors


def test_span_origin(sentry_init, capture_events, get_client):
    sentry_init(
        integrations=[PyramidIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client = get_client()
    client.get("/message")

    (_, event) = events

    assert event["contexts"]["trace"]["origin"] == "auto.http.pyramid"
sentry-python-2.18.0/tests/integrations/quart/000077500000000000000000000000001471214654000214555ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/quart/__init__.py000066400000000000000000000000541471214654000235650ustar00rootroot00000000000000import pytest

pytest.importorskip("quart")
sentry-python-2.18.0/tests/integrations/quart/test_quart.py000066400000000000000000000367231471214654000242350ustar00rootroot00000000000000import json
import threading
from unittest import mock

import pytest

import sentry_sdk
from sentry_sdk import (
    set_tag,
    capture_message,
    capture_exception,
)
from sentry_sdk.integrations.logging import LoggingIntegration
import sentry_sdk.integrations.quart as quart_sentry

from quart import Quart, Response, abort, stream_with_context
from quart.views import View

from quart_auth import AuthUser, login_user

try:
    from quart_auth import QuartAuth

    auth_manager = QuartAuth()
except ImportError:
    from quart_auth import AuthManager

    auth_manager = AuthManager()


def quart_app_factory():
    app = Quart(__name__)
    app.debug = False
    app.config["TESTING"] = False
    app.secret_key = "haha"

    auth_manager.init_app(app)

    @app.route("/message")
    async def hi():
        capture_message("hi")
        return "ok"

    @app.route("/message/")
    async def hi_with_id(message_id):
        capture_message("hi with id")
        return "ok with id"

    @app.get("/sync/thread_ids")
    def _thread_ids_sync():
        return {
            "main": str(threading.main_thread().ident),
            "active": str(threading.current_thread().ident),
        }

    @app.get("/async/thread_ids")
    async def _thread_ids_async():
        return {
            "main": str(threading.main_thread().ident),
            "active": str(threading.current_thread().ident),
        }

    return app


@pytest.fixture(params=("manual",))
def integration_enabled_params(request):
    if request.param == "manual":
        return {"integrations": [quart_sentry.QuartIntegration()]}
    else:
        raise ValueError(request.param)


@pytest.mark.asyncio
async def test_has_context(sentry_init, capture_events):
    sentry_init(integrations=[quart_sentry.QuartIntegration()])
    app = quart_app_factory()
    events = capture_events()

    client = app.test_client()
    response = await client.get("/message")
    assert response.status_code == 200

    (event,) = events
    assert event["transaction"] == "hi"
    assert "data" not in event["request"]
    assert event["request"]["url"] == "http://localhost/message"


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "url,transaction_style,expected_transaction,expected_source",
    [
        ("/message", "endpoint", "hi", "component"),
        ("/message", "url", "/message", "route"),
        ("/message/123456", "endpoint", "hi_with_id", "component"),
        ("/message/123456", "url", "/message/", "route"),
    ],
)
async def test_transaction_style(
    sentry_init,
    capture_events,
    url,
    transaction_style,
    expected_transaction,
    expected_source,
):
    sentry_init(
        integrations=[
            quart_sentry.QuartIntegration(transaction_style=transaction_style)
        ]
    )
    app = quart_app_factory()
    events = capture_events()

    client = app.test_client()
    response = await client.get(url)
    assert response.status_code == 200

    (event,) = events
    assert event["transaction"] == expected_transaction


@pytest.mark.asyncio
async def test_errors(
    sentry_init,
    capture_exceptions,
    capture_events,
    integration_enabled_params,
):
    sentry_init(**integration_enabled_params)
    app = quart_app_factory()

    @app.route("/")
    async def index():
        1 / 0

    exceptions = capture_exceptions()
    events = capture_events()

    client = app.test_client()
    try:
        await client.get("/")
    except ZeroDivisionError:
        pass

    (exc,) = exceptions
    assert isinstance(exc, ZeroDivisionError)

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "quart"


@pytest.mark.asyncio
async def test_quart_auth_not_installed(
    sentry_init, capture_events, monkeypatch, integration_enabled_params
):
    sentry_init(**integration_enabled_params)
    app = quart_app_factory()

    monkeypatch.setattr(quart_sentry, "quart_auth", None)

    events = capture_events()

    client = app.test_client()
    await client.get("/message")

    (event,) = events
    assert event.get("user", {}).get("id") is None


@pytest.mark.asyncio
async def test_quart_auth_not_configured(
    sentry_init, capture_events, monkeypatch, integration_enabled_params
):
    sentry_init(**integration_enabled_params)
    app = quart_app_factory()

    assert quart_sentry.quart_auth

    events = capture_events()
    client = app.test_client()
    await client.get("/message")

    (event,) = events
    assert event.get("user", {}).get("id") is None


@pytest.mark.asyncio
async def test_quart_auth_partially_configured(
    sentry_init, capture_events, monkeypatch, integration_enabled_params
):
    sentry_init(**integration_enabled_params)
    app = quart_app_factory()

    events = capture_events()

    client = app.test_client()
    await client.get("/message")

    (event,) = events
    assert event.get("user", {}).get("id") is None


@pytest.mark.asyncio
@pytest.mark.parametrize("send_default_pii", [True, False])
@pytest.mark.parametrize("user_id", [None, "42", "3"])
async def test_quart_auth_configured(
    send_default_pii,
    sentry_init,
    user_id,
    capture_events,
    monkeypatch,
    integration_enabled_params,
):
    sentry_init(send_default_pii=send_default_pii, **integration_enabled_params)
    app = quart_app_factory()

    @app.route("/login")
    async def login():
        if user_id is not None:
            login_user(AuthUser(user_id))
        return "ok"

    events = capture_events()

    client = app.test_client()
    assert (await client.get("/login")).status_code == 200
    assert not events

    assert (await client.get("/message")).status_code == 200

    (event,) = events
    if user_id is None or not send_default_pii:
        assert event.get("user", {}).get("id") is None
    else:
        assert event["user"]["id"] == str(user_id)


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "integrations",
    [
        [quart_sentry.QuartIntegration()],
        [quart_sentry.QuartIntegration(), LoggingIntegration(event_level="ERROR")],
    ],
)
async def test_errors_not_reported_twice(sentry_init, integrations, capture_events):
    sentry_init(integrations=integrations)
    app = quart_app_factory()

    @app.route("/")
    async def index():
        try:
            1 / 0
        except Exception as e:
            app.logger.exception(e)
            raise e

    events = capture_events()

    client = app.test_client()
    # The exception does not propagate to the test client: Quart handles it
    # and returns a 500 response, so there is no pytest.raises here.
    await client.get("/")

    assert len(events) == 1


@pytest.mark.asyncio
async def test_logging(sentry_init, capture_events):
    # ensure that Quart's logger magic doesn't break ours
    sentry_init(
        integrations=[
            quart_sentry.QuartIntegration(),
            LoggingIntegration(event_level="ERROR"),
        ]
    )
    app = quart_app_factory()

    @app.route("/")
    async def index():
        app.logger.error("hi")
        return "ok"

    events = capture_events()

    client = app.test_client()
    await client.get("/")

    (event,) = events
    assert event["level"] == "error"


@pytest.mark.asyncio
async def test_no_errors_without_request(sentry_init):
    sentry_init(integrations=[quart_sentry.QuartIntegration()])
    app = quart_app_factory()

    async with app.app_context():
        capture_exception(ValueError())


def test_cli_commands_raise():
    app = quart_app_factory()

    if not hasattr(app, "cli"):
        pytest.skip("Too old quart version")

    from quart.cli import ScriptInfo

    @app.cli.command()
    def foo():
        1 / 0

    with pytest.raises(ZeroDivisionError):
        app.cli.main(
            args=["foo"], prog_name="myapp", obj=ScriptInfo(create_app=lambda _: app)
        )


@pytest.mark.asyncio
async def test_500(sentry_init):
    sentry_init(integrations=[quart_sentry.QuartIntegration()])
    app = quart_app_factory()

    @app.route("/")
    async def index():
        1 / 0

    @app.errorhandler(500)
    async def error_handler(err):
        return "Sentry error."

    client = app.test_client()
    response = await client.get("/")

    assert (await response.get_data(as_text=True)) == "Sentry error."


@pytest.mark.asyncio
async def test_error_in_errorhandler(sentry_init, capture_events):
    sentry_init(integrations=[quart_sentry.QuartIntegration()])
    app = quart_app_factory()

    @app.route("/")
    async def index():
        raise ValueError()

    @app.errorhandler(500)
    async def error_handler(err):
        1 / 0

    events = capture_events()

    client = app.test_client()

    with pytest.raises(ZeroDivisionError):
        await client.get("/")

    event1, event2 = events

    (exception,) = event1["exception"]["values"]
    assert exception["type"] == "ValueError"

    exception = event2["exception"]["values"][-1]
    assert exception["type"] == "ZeroDivisionError"


@pytest.mark.asyncio
async def test_bad_request_not_captured(sentry_init, capture_events):
    sentry_init(integrations=[quart_sentry.QuartIntegration()])
    app = quart_app_factory()
    events = capture_events()

    @app.route("/")
    async def index():
        abort(400)

    client = app.test_client()

    await client.get("/")

    assert not events


@pytest.mark.asyncio
async def test_does_not_leak_scope(sentry_init, capture_events):
    sentry_init(integrations=[quart_sentry.QuartIntegration()])
    app = quart_app_factory()
    events = capture_events()

    sentry_sdk.get_isolation_scope().set_tag("request_data", False)

    @app.route("/")
    async def index():
        sentry_sdk.get_isolation_scope().set_tag("request_data", True)

        async def generate():
            for row in range(1000):
                assert sentry_sdk.get_isolation_scope()._tags["request_data"]

                yield str(row) + "\n"

        return Response(stream_with_context(generate)(), mimetype="text/csv")

    client = app.test_client()
    response = await client.get("/")
    assert (await response.get_data(as_text=True)) == "".join(
        str(row) + "\n" for row in range(1000)
    )
    assert not events
    assert not sentry_sdk.get_isolation_scope()._tags["request_data"]


@pytest.mark.asyncio
async def test_scoped_test_client(sentry_init):
    sentry_init(integrations=[quart_sentry.QuartIntegration()])
    app = quart_app_factory()

    @app.route("/")
    async def index():
        return "ok"

    async with app.test_client() as client:
        response = await client.get("/")
        assert response.status_code == 200


@pytest.mark.asyncio
@pytest.mark.parametrize("exc_cls", [ZeroDivisionError, Exception])
async def test_errorhandler_for_exception_swallows_exception(
    sentry_init, capture_events, exc_cls
):
    # In contrast to error handlers for a status code, error
    # handlers for exceptions can swallow the exception (this is
    # just how the Quart signal works)
    sentry_init(integrations=[quart_sentry.QuartIntegration()])
    app = quart_app_factory()
    events = capture_events()

    @app.route("/")
    async def index():
        1 / 0

    @app.errorhandler(exc_cls)
    async def zerodivision(e):
        return "ok"

    async with app.test_client() as client:
        response = await client.get("/")
        assert response.status_code == 200

    assert not events


@pytest.mark.asyncio
async def test_tracing_success(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0, integrations=[quart_sentry.QuartIntegration()])
    app = quart_app_factory()

    @app.before_request
    async def _():
        set_tag("before_request", "yes")

    @app.route("/message_tx")
    async def hi_tx():
        set_tag("view", "yes")
        capture_message("hi")
        return "ok"

    events = capture_events()

    async with app.test_client() as client:
        response = await client.get("/message_tx")
        assert response.status_code == 200

    message_event, transaction_event = events

    assert transaction_event["type"] == "transaction"
    assert transaction_event["transaction"] == "hi_tx"
    assert transaction_event["tags"]["view"] == "yes"
    assert transaction_event["tags"]["before_request"] == "yes"

    assert message_event["message"] == "hi"
    assert message_event["transaction"] == "hi_tx"
    assert message_event["tags"]["view"] == "yes"
    assert message_event["tags"]["before_request"] == "yes"


@pytest.mark.asyncio
async def test_tracing_error(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0, integrations=[quart_sentry.QuartIntegration()])
    app = quart_app_factory()

    events = capture_events()

    @app.route("/error")
    async def error():
        1 / 0

    async with app.test_client() as client:
        response = await client.get("/error")
        assert response.status_code == 500

    error_event, transaction_event = events

    assert transaction_event["type"] == "transaction"
    assert transaction_event["transaction"] == "error"

    assert error_event["transaction"] == "error"
    (exception,) = error_event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"


@pytest.mark.asyncio
async def test_class_based_views(sentry_init, capture_events):
    sentry_init(integrations=[quart_sentry.QuartIntegration()])
    app = quart_app_factory()
    events = capture_events()

    @app.route("/")
    class HelloClass(View):
        methods = ["GET"]

        async def dispatch_request(self):
            capture_message("hi")
            return "ok"

    app.add_url_rule("/hello-class/", view_func=HelloClass.as_view("hello_class"))

    async with app.test_client() as client:
        response = await client.get("/hello-class/")
        assert response.status_code == 200

    (event,) = events

    assert event["message"] == "hi"
    assert event["transaction"] == "hello_class"


@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
@pytest.mark.asyncio
async def test_active_thread_id(
    sentry_init, capture_envelopes, teardown_profiling, endpoint
):
    with mock.patch(
        "sentry_sdk.profiler.transaction_profiler.PROFILE_MINIMUM_SAMPLES", 0
    ):
        sentry_init(
            traces_sample_rate=1.0,
            profiles_sample_rate=1.0,
        )
        app = quart_app_factory()

        envelopes = capture_envelopes()

        async with app.test_client() as client:
            response = await client.get(endpoint)
            assert response.status_code == 200

        data = json.loads(await response.get_data(as_text=True))

        envelopes = list(envelopes)
        assert len(envelopes) == 1

        profiles = [item for item in envelopes[0].items if item.type == "profile"]
        assert len(profiles) == 1, envelopes[0].items

        for item in profiles:
            transactions = item.payload.json["transactions"]
            assert len(transactions) == 1
            assert str(data["active"]) == transactions[0]["active_thread_id"]

        transactions = [
            item for item in envelopes[0].items if item.type == "transaction"
        ]
        assert len(transactions) == 1

        for item in transactions:
            transaction = item.payload.json
            trace_context = transaction["contexts"]["trace"]
            assert str(data["active"]) == trace_context["data"]["thread.id"]


@pytest.mark.asyncio
async def test_span_origin(sentry_init, capture_events):
    sentry_init(
        integrations=[quart_sentry.QuartIntegration()],
        traces_sample_rate=1.0,
    )
    app = quart_app_factory()
    events = capture_events()

    client = app.test_client()
    await client.get("/message")

    (_, event) = events

    assert event["contexts"]["trace"]["origin"] == "auto.http.quart"
sentry-python-2.18.0/tests/integrations/ray/000077500000000000000000000000001471214654000211145ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/ray/__init__.py000066400000000000000000000000521471214654000232220ustar00rootroot00000000000000import pytest

pytest.importorskip("ray")
sentry-python-2.18.0/tests/integrations/ray/test_ray.py000066400000000000000000000134471471214654000233310ustar00rootroot00000000000000import json
import os
import pytest

import ray

import sentry_sdk
from sentry_sdk.envelope import Envelope
from sentry_sdk.integrations.ray import RayIntegration
from tests.conftest import TestTransport


class RayTestTransport(TestTransport):
    def __init__(self):
        self.envelopes = []
        super().__init__()

    def capture_envelope(self, envelope: Envelope) -> None:
        self.envelopes.append(envelope)


class RayLoggingTransport(TestTransport):
    def __init__(self):
        super().__init__()

    def capture_envelope(self, envelope: Envelope) -> None:
        print(envelope.serialize().decode("utf-8", "replace"))


def setup_sentry_with_logging_transport():
    setup_sentry(transport=RayLoggingTransport())


def setup_sentry(transport=None):
    sentry_sdk.init(
        integrations=[RayIntegration()],
        transport=RayTestTransport() if transport is None else transport,
        traces_sample_rate=1.0,
    )
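# `setup_sentry` doubles as Ray's `worker_process_setup_hook` (see the
# `ray.init` calls below), so each worker process initializes its own SDK
# client and transport.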


@pytest.mark.forked
def test_ray_tracing():
    setup_sentry()

    ray.init(
        runtime_env={
            "worker_process_setup_hook": setup_sentry,
            "working_dir": "./",
        }
    )

    @ray.remote
    def example_task():
        with sentry_sdk.start_span(op="task", name="example task step"):
            ...

        return sentry_sdk.get_client().transport.envelopes

    with sentry_sdk.start_transaction(op="task", name="ray test transaction"):
        worker_envelopes = ray.get(example_task.remote())

    client_envelope = sentry_sdk.get_client().transport.envelopes[0]
    client_transaction = client_envelope.get_transaction_event()
    worker_envelope = worker_envelopes[0]
    worker_transaction = worker_envelope.get_transaction_event()

    assert (
        client_transaction["contexts"]["trace"]["trace_id"]
        == worker_transaction["contexts"]["trace"]["trace_id"]
    )

    for span in client_transaction["spans"]:
        assert (
            span["trace_id"]
            == client_transaction["contexts"]["trace"]["trace_id"]
            == worker_transaction["contexts"]["trace"]["trace_id"]
        )

    for span in worker_transaction["spans"]:
        assert (
            span["trace_id"]
            == client_transaction["contexts"]["trace"]["trace_id"]
            == worker_transaction["contexts"]["trace"]["trace_id"]
        )


@pytest.mark.forked
def test_ray_spans():
    setup_sentry()

    ray.init(
        runtime_env={
            "worker_process_setup_hook": setup_sentry,
            "working_dir": "./",
        }
    )

    @ray.remote
    def example_task():
        return sentry_sdk.get_client().transport.envelopes

    with sentry_sdk.start_transaction(op="task", name="ray test transaction"):
        worker_envelopes = ray.get(example_task.remote())

    client_envelope = sentry_sdk.get_client().transport.envelopes[0]
    client_transaction = client_envelope.get_transaction_event()
    worker_envelope = worker_envelopes[0]
    worker_transaction = worker_envelope.get_transaction_event()

    for span in client_transaction["spans"]:
        assert span["op"] == "queue.submit.ray"
        assert span["origin"] == "auto.queue.ray"

    for span in worker_transaction["spans"]:
        assert span["op"] == "queue.task.ray"
        assert span["origin"] == "auto.queue.ray"


@pytest.mark.forked
def test_ray_errors():
    setup_sentry_with_logging_transport()

    ray.init(
        runtime_env={
            "worker_process_setup_hook": setup_sentry_with_logging_transport,
            "working_dir": "./",
        }
    )

    @ray.remote
    def example_task():
        1 / 0

    with sentry_sdk.start_transaction(op="task", name="ray test transaction"):
        with pytest.raises(ZeroDivisionError):
            future = example_task.remote()
            ray.get(future)

    job_id = future.job_id().hex()

    # Read the worker log output containing the error
    log_dir = "/tmp/ray/session_latest/logs/"
    log_file = [
        f
        for f in os.listdir(log_dir)
        if "worker" in f and job_id in f and f.endswith(".out")
    ][0]
    with open(os.path.join(log_dir, log_file), "r") as file:
        lines = file.readlines()
        # parse error object from log line
        error = json.loads(lines[4][:-1])

    assert error["level"] == "error"
    assert (
        error["transaction"]
        == "tests.integrations.ray.test_ray.test_ray_errors..example_task"
    )  # its in the worker, not the client thus not "ray test transaction"
    assert error["exception"]["values"][0]["mechanism"]["type"] == "ray"
    assert not error["exception"]["values"][0]["mechanism"]["handled"]


@pytest.mark.forked
def test_ray_actor():
    setup_sentry()

    ray.init(
        runtime_env={
            "worker_process_setup_hook": setup_sentry,
            "working_dir": "./",
        }
    )

    @ray.remote
    class Counter:
        def __init__(self):
            self.n = 0

        def increment(self):
            with sentry_sdk.start_span(op="task", name="example task step"):
                self.n += 1

            return sentry_sdk.get_client().transport.envelopes

    with sentry_sdk.start_transaction(op="task", name="ray test transaction"):
        counter = Counter.remote()
        worker_envelopes = ray.get(counter.increment.remote())

    # Currently no transactions/spans are captured in actors
    assert worker_envelopes == []

    client_envelope = sentry_sdk.get_client().transport.envelopes[0]
    client_transaction = client_envelope.get_transaction_event()

    assert client_transaction["contexts"]["trace"]["trace_id"] is not None

    for span in client_transaction["spans"]:
        assert span["trace_id"] == client_transaction["contexts"]["trace"]["trace_id"]
sentry-python-2.18.0/tests/integrations/redis/000077500000000000000000000000001471214654000214275ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/redis/__init__.py000066400000000000000000000000541471214654000235370ustar00rootroot00000000000000import pytest

pytest.importorskip("redis")
sentry-python-2.18.0/tests/integrations/redis/asyncio/000077500000000000000000000000001471214654000230745ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/redis/asyncio/__init__.py000066400000000000000000000000711471214654000252030ustar00rootroot00000000000000import pytest

pytest.importorskip("fakeredis.aioredis")
sentry-python-2.18.0/tests/integrations/redis/asyncio/test_redis_asyncio.py000066400000000000000000000061111471214654000273370ustar00rootroot00000000000000import pytest

from sentry_sdk import capture_message, start_transaction
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.redis import RedisIntegration
from tests.conftest import ApproxDict

from fakeredis.aioredis import FakeRedis


@pytest.mark.asyncio
async def test_async_basic(sentry_init, capture_events):
    sentry_init(integrations=[RedisIntegration()])
    events = capture_events()

    connection = FakeRedis()

    await connection.get("foobar")
    capture_message("hi")

    (event,) = events
    (crumb,) = event["breadcrumbs"]["values"]

    assert crumb == {
        "category": "redis",
        "message": "GET 'foobar'",
        "data": {
            "db.operation": "GET",
            "redis.key": "foobar",
            "redis.command": "GET",
            "redis.is_cluster": False,
        },
        "timestamp": crumb["timestamp"],
        "type": "redis",
    }


@pytest.mark.parametrize(
    "is_transaction, send_default_pii, expected_first_ten",
    [
        (False, False, ["GET 'foo'", "SET 'bar' [Filtered]", "SET 'baz' [Filtered]"]),
        (True, True, ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"]),
    ],
)
@pytest.mark.asyncio
async def test_async_redis_pipeline(
    sentry_init, capture_events, is_transaction, send_default_pii, expected_first_ten
):
    sentry_init(
        integrations=[RedisIntegration()],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    connection = FakeRedis()
    with start_transaction():
        pipeline = connection.pipeline(transaction=is_transaction)
        pipeline.get("foo")
        pipeline.set("bar", 1)
        pipeline.set("baz", 2)
        await pipeline.execute()

    (event,) = events
    (span,) = event["spans"]
    assert span["op"] == "db.redis"
    assert span["description"] == "redis.pipeline.execute"
    assert span["data"] == ApproxDict(
        {
            "redis.commands": {
                "count": 3,
                "first_ten": expected_first_ten,
            },
            SPANDATA.DB_SYSTEM: "redis",
            SPANDATA.DB_NAME: "0",
            SPANDATA.SERVER_ADDRESS: connection.connection_pool.connection_kwargs.get(
                "host"
            ),
            SPANDATA.SERVER_PORT: 6379,
        }
    )
    assert span["tags"] == {
        "redis.transaction": is_transaction,
        "redis.is_cluster": False,
    }


@pytest.mark.asyncio
async def test_async_span_origin(sentry_init, capture_events):
    sentry_init(
        integrations=[RedisIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    connection = FakeRedis()
    with start_transaction(name="custom_transaction"):
        # default case
        await connection.set("somekey", "somevalue")

        # pipeline
        pipeline = connection.pipeline(transaction=False)
        pipeline.get("somekey")
        pipeline.set("anotherkey", 1)
        await pipeline.execute()

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"

    for span in event["spans"]:
        assert span["origin"] == "auto.db.redis"
sentry-python-2.18.0/tests/integrations/redis/cluster/000077500000000000000000000000001471214654000231105ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/redis/cluster/__init__.py000066400000000000000000000000641471214654000252210ustar00rootroot00000000000000import pytest

pytest.importorskip("redis.cluster")
sentry-python-2.18.0/tests/integrations/redis/cluster/test_redis_cluster.py000066400000000000000000000120271471214654000273720ustar00rootroot00000000000000import pytest
from sentry_sdk import capture_message
from sentry_sdk.consts import SPANDATA
from sentry_sdk.api import start_transaction
from sentry_sdk.integrations.redis import RedisIntegration
from tests.conftest import ApproxDict

import redis


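# RedisCluster normally opens real connections when constructed; the autouse
# fixture below replaces the network-touching methods with no-ops so the tests
# run without a live cluster.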
@pytest.fixture(autouse=True)
def monkeypatch_rediscluster_class(reset_integrations):
    pipeline_cls = redis.cluster.ClusterPipeline
    redis.cluster.NodesManager.initialize = lambda *_, **__: None
    redis.RedisCluster.command = lambda *_: []
    redis.RedisCluster.pipeline = lambda *_, **__: pipeline_cls(None, None)
    redis.RedisCluster.get_default_node = lambda *_, **__: redis.cluster.ClusterNode(
        "localhost", 6379
    )
    pipeline_cls.execute = lambda *_, **__: None
    redis.RedisCluster.execute_command = lambda *_, **__: []


def test_rediscluster_breadcrumb(sentry_init, capture_events):
    sentry_init(integrations=[RedisIntegration()])
    events = capture_events()

    rc = redis.RedisCluster(host="localhost", port=6379)
    rc.get("foobar")
    capture_message("hi")

    (event,) = events
    crumbs = event["breadcrumbs"]["values"]

    # on initializing a RedisCluster, a COMMAND call is made - this is not important for the test
    # but must be accounted for
    assert len(crumbs) in (1, 2)
    assert len(crumbs) == 1 or crumbs[0]["message"] == "COMMAND"

    crumb = crumbs[-1]

    assert crumb == {
        "category": "redis",
        "message": "GET 'foobar'",
        "data": {
            "db.operation": "GET",
            "redis.key": "foobar",
            "redis.command": "GET",
            "redis.is_cluster": True,
        },
        "timestamp": crumb["timestamp"],
        "type": "redis",
    }


@pytest.mark.parametrize(
    "send_default_pii, description",
    [
        (False, "SET 'bar' [Filtered]"),
        (True, "SET 'bar' 1"),
    ],
)
def test_rediscluster_basic(sentry_init, capture_events, send_default_pii, description):
    sentry_init(
        integrations=[RedisIntegration()],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    with start_transaction():
        rc = redis.RedisCluster(host="localhost", port=6379)
        rc.set("bar", 1)

    (event,) = events
    spans = event["spans"]

    # on initializing a RedisCluster, a COMMAND call is made - this is not important for the test
    # but must be accounted for
    assert len(spans) in (1, 2)
    assert len(spans) == 1 or spans[0]["description"] == "COMMAND"

    span = spans[-1]
    assert span["op"] == "db.redis"
    assert span["description"] == description
    assert span["data"] == ApproxDict(
        {
            SPANDATA.DB_SYSTEM: "redis",
            # ClusterNode converts localhost to 127.0.0.1
            SPANDATA.SERVER_ADDRESS: "127.0.0.1",
            SPANDATA.SERVER_PORT: 6379,
        }
    )
    assert span["tags"] == {
        "db.operation": "SET",
        "redis.command": "SET",
        "redis.is_cluster": True,
        "redis.key": "bar",
    }


@pytest.mark.parametrize(
    "send_default_pii, expected_first_ten",
    [
        (False, ["GET 'foo'", "SET 'bar' [Filtered]", "SET 'baz' [Filtered]"]),
        (True, ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"]),
    ],
)
def test_rediscluster_pipeline(
    sentry_init, capture_events, send_default_pii, expected_first_ten
):
    sentry_init(
        integrations=[RedisIntegration()],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    rc = redis.RedisCluster(host="localhost", port=6379)
    with start_transaction():
        pipeline = rc.pipeline()
        pipeline.get("foo")
        pipeline.set("bar", 1)
        pipeline.set("baz", 2)
        pipeline.execute()

    (event,) = events
    (span,) = event["spans"]
    assert span["op"] == "db.redis"
    assert span["description"] == "redis.pipeline.execute"
    assert span["data"] == ApproxDict(
        {
            "redis.commands": {
                "count": 3,
                "first_ten": expected_first_ten,
            },
            SPANDATA.DB_SYSTEM: "redis",
            # ClusterNode converts localhost to 127.0.0.1
            SPANDATA.SERVER_ADDRESS: "127.0.0.1",
            SPANDATA.SERVER_PORT: 6379,
        }
    )
    assert span["tags"] == {
        "redis.transaction": False,  # For Cluster, this is always False
        "redis.is_cluster": True,
    }


def test_rediscluster_span_origin(sentry_init, capture_events):
    sentry_init(
        integrations=[RedisIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    rc = redis.RedisCluster(host="localhost", port=6379)
    with start_transaction(name="custom_transaction"):
        # default case
        rc.set("somekey", "somevalue")

        # pipeline
        pipeline = rc.pipeline(transaction=False)
        pipeline.get("somekey")
        pipeline.set("anotherkey", 1)
        pipeline.execute()

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"

    for span in event["spans"]:
        assert span["origin"] == "auto.db.redis"
sentry-python-2.18.0/tests/integrations/redis/cluster_asyncio/000077500000000000000000000000001471214654000246355ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/redis/cluster_asyncio/__init__.py000066400000000000000000000000741471214654000267470ustar00rootroot00000000000000import pytest

pytest.importorskip("redis.asyncio.cluster")
sentry-python-2.18.0/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py000066400000000000000000000115241471214654000326450ustar00rootroot00000000000000import pytest

from sentry_sdk import capture_message, start_transaction
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.redis import RedisIntegration
from tests.conftest import ApproxDict

from redis.asyncio import cluster


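# redis.asyncio.cluster performs real network I/O when initializing and
# executing commands; these async no-op stand-ins let the tests run without a
# live cluster (they are wired up in the autouse fixture below).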
async def fake_initialize(*_, **__):
    return None


async def fake_execute_command(*_, **__):
    return []


async def fake_execute(*_, **__):
    return None


@pytest.fixture(autouse=True)
def monkeypatch_rediscluster_asyncio_class(reset_integrations):
    pipeline_cls = cluster.ClusterPipeline
    cluster.NodesManager.initialize = fake_initialize
    cluster.RedisCluster.get_default_node = lambda *_, **__: cluster.ClusterNode(
        "localhost", 6379
    )
    cluster.RedisCluster.pipeline = lambda self, *_, **__: pipeline_cls(self)
    pipeline_cls.execute = fake_execute
    cluster.RedisCluster.execute_command = fake_execute_command


@pytest.mark.asyncio
async def test_async_breadcrumb(sentry_init, capture_events):
    sentry_init(integrations=[RedisIntegration()])
    events = capture_events()

    connection = cluster.RedisCluster(host="localhost", port=6379)

    await connection.get("foobar")
    capture_message("hi")

    (event,) = events
    (crumb,) = event["breadcrumbs"]["values"]

    assert crumb == {
        "category": "redis",
        "message": "GET 'foobar'",
        "data": ApproxDict(
            {
                "db.operation": "GET",
                "redis.key": "foobar",
                "redis.command": "GET",
                "redis.is_cluster": True,
            }
        ),
        "timestamp": crumb["timestamp"],
        "type": "redis",
    }


@pytest.mark.parametrize(
    "send_default_pii, description",
    [
        (False, "SET 'bar' [Filtered]"),
        (True, "SET 'bar' 1"),
    ],
)
@pytest.mark.asyncio
async def test_async_basic(sentry_init, capture_events, send_default_pii, description):
    sentry_init(
        integrations=[RedisIntegration()],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    connection = cluster.RedisCluster(host="localhost", port=6379)
    with start_transaction():
        await connection.set("bar", 1)

    (event,) = events
    (span,) = event["spans"]
    assert span["op"] == "db.redis"
    assert span["description"] == description
    assert span["data"] == ApproxDict(
        {
            SPANDATA.DB_SYSTEM: "redis",
            # ClusterNode converts localhost to 127.0.0.1
            SPANDATA.SERVER_ADDRESS: "127.0.0.1",
            SPANDATA.SERVER_PORT: 6379,
        }
    )
    assert span["tags"] == {
        "redis.is_cluster": True,
        "db.operation": "SET",
        "redis.command": "SET",
        "redis.key": "bar",
    }


@pytest.mark.parametrize(
    "send_default_pii, expected_first_ten",
    [
        (False, ["GET 'foo'", "SET 'bar' [Filtered]", "SET 'baz' [Filtered]"]),
        (True, ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"]),
    ],
)
@pytest.mark.asyncio
async def test_async_redis_pipeline(
    sentry_init, capture_events, send_default_pii, expected_first_ten
):
    sentry_init(
        integrations=[RedisIntegration()],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    connection = cluster.RedisCluster(host="localhost", port=6379)
    with start_transaction():
        pipeline = connection.pipeline()
        pipeline.get("foo")
        pipeline.set("bar", 1)
        pipeline.set("baz", 2)
        await pipeline.execute()

    (event,) = events
    (span,) = event["spans"]
    assert span["op"] == "db.redis"
    assert span["description"] == "redis.pipeline.execute"
    assert span["data"] == ApproxDict(
        {
            "redis.commands": {
                "count": 3,
                "first_ten": expected_first_ten,
            },
            SPANDATA.DB_SYSTEM: "redis",
            # ClusterNode converts localhost to 127.0.0.1
            SPANDATA.SERVER_ADDRESS: "127.0.0.1",
            SPANDATA.SERVER_PORT: 6379,
        }
    )
    assert span["tags"] == {
        "redis.transaction": False,
        "redis.is_cluster": True,
    }


@pytest.mark.asyncio
async def test_async_span_origin(sentry_init, capture_events):
    sentry_init(
        integrations=[RedisIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    connection = cluster.RedisCluster(host="localhost", port=6379)
    with start_transaction(name="custom_transaction"):
        # default case
        await connection.set("somekey", "somevalue")

        # pipeline
        pipeline = connection.pipeline(transaction=False)
        pipeline.get("somekey")
        pipeline.set("anotherkey", 1)
        await pipeline.execute()

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"

    for span in event["spans"]:
        assert span["origin"] == "auto.db.redis"
sentry-python-2.18.0/tests/integrations/redis/test_redis.py000066400000000000000000000226421471214654000241540ustar00rootroot00000000000000from unittest import mock

import pytest
from fakeredis import FakeStrictRedis

from sentry_sdk import capture_message, start_transaction
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.redis import RedisIntegration


MOCK_CONNECTION_POOL = mock.MagicMock()
MOCK_CONNECTION_POOL.connection_kwargs = {
    "host": "localhost",
    "port": 63791,
    "db": 1,
}
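
# These connection kwargs drive the SPANDATA.DB_NAME / SERVER_ADDRESS /
# SERVER_PORT assertions in the db-connection-attribute tests below.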


def test_basic(sentry_init, capture_events):
    sentry_init(integrations=[RedisIntegration()])
    events = capture_events()

    connection = FakeStrictRedis()

    connection.get("foobar")
    capture_message("hi")

    (event,) = events
    (crumb,) = event["breadcrumbs"]["values"]

    assert crumb == {
        "category": "redis",
        "message": "GET 'foobar'",
        "data": {
            "redis.key": "foobar",
            "redis.command": "GET",
            "redis.is_cluster": False,
            "db.operation": "GET",
        },
        "timestamp": crumb["timestamp"],
        "type": "redis",
    }


@pytest.mark.parametrize(
    "is_transaction, send_default_pii, expected_first_ten",
    [
        (False, False, ["GET 'foo'", "SET 'bar' [Filtered]", "SET 'baz' [Filtered]"]),
        (True, True, ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"]),
    ],
)
def test_redis_pipeline(
    sentry_init, capture_events, is_transaction, send_default_pii, expected_first_ten
):
    sentry_init(
        integrations=[RedisIntegration()],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    connection = FakeStrictRedis()
    with start_transaction():
        pipeline = connection.pipeline(transaction=is_transaction)
        pipeline.get("foo")
        pipeline.set("bar", 1)
        pipeline.set("baz", 2)
        pipeline.execute()

    (event,) = events
    (span,) = event["spans"]
    assert span["op"] == "db.redis"
    assert span["description"] == "redis.pipeline.execute"
    assert span["data"][SPANDATA.DB_SYSTEM] == "redis"
    assert span["data"]["redis.commands"] == {
        "count": 3,
        "first_ten": expected_first_ten,
    }
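    # "first_ten" holds at most the first ten pipeline commands; with
    # send_default_pii=False the SET values are redacted to [Filtered].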
    assert span["tags"] == {
        "redis.transaction": is_transaction,
        "redis.is_cluster": False,
    }


def test_sensitive_data(sentry_init, capture_events):
    # fakeredis does not support the AUTH command, so we need to mock it
    with mock.patch(
        "sentry_sdk.integrations.redis.utils._COMMANDS_INCLUDING_SENSITIVE_DATA",
        ["get"],
    ):
        sentry_init(
            integrations=[RedisIntegration()],
            traces_sample_rate=1.0,
            send_default_pii=True,
        )
        events = capture_events()

        connection = FakeStrictRedis()
        with start_transaction():
            connection.get(
                "this is super secret"
            )  # because fakeredis does not support AUTH we use GET instead

        (event,) = events
        spans = event["spans"]
        assert spans[0]["op"] == "db.redis"
        assert spans[0]["description"] == "GET [Filtered]"


def test_pii_data_redacted(sentry_init, capture_events):
    sentry_init(
        integrations=[RedisIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    connection = FakeStrictRedis()
    with start_transaction():
        connection.set("somekey1", "my secret string1")
        connection.set("somekey2", "my secret string2")
        connection.get("somekey2")
        connection.delete("somekey1", "somekey2")

    (event,) = events
    spans = event["spans"]
    assert spans[0]["op"] == "db.redis"
    assert spans[0]["description"] == "SET 'somekey1' [Filtered]"
    assert spans[1]["description"] == "SET 'somekey2' [Filtered]"
    assert spans[2]["description"] == "GET 'somekey2'"
    assert spans[3]["description"] == "DEL 'somekey1' [Filtered]"


def test_pii_data_sent(sentry_init, capture_events):
    sentry_init(
        integrations=[RedisIntegration()],
        traces_sample_rate=1.0,
        send_default_pii=True,
    )
    events = capture_events()

    connection = FakeStrictRedis()
    with start_transaction():
        connection.set("somekey1", "my secret string1")
        connection.set("somekey2", "my secret string2")
        connection.get("somekey2")
        connection.delete("somekey1", "somekey2")

    (event,) = events
    spans = event["spans"]
    assert spans[0]["op"] == "db.redis"
    assert spans[0]["description"] == "SET 'somekey1' 'my secret string1'"
    assert spans[1]["description"] == "SET 'somekey2' 'my secret string2'"
    assert spans[2]["description"] == "GET 'somekey2'"
    assert spans[3]["description"] == "DEL 'somekey1' 'somekey2'"


def test_data_truncation(sentry_init, capture_events):
    sentry_init(
        integrations=[RedisIntegration()],
        traces_sample_rate=1.0,
        send_default_pii=True,
    )
    events = capture_events()

    connection = FakeStrictRedis()
    with start_transaction():
        long_string = "a" * 100000
        connection.set("somekey1", long_string)
        short_string = "b" * 10
        connection.set("somekey2", short_string)

    (event,) = events
    spans = event["spans"]
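    # With the integration's default data-size limit (1024), the description
    # keeps 1024 - len("...") - len("SET 'somekey1' '") characters of the
    # value and appends "..." as a truncation marker.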
    assert spans[0]["op"] == "db.redis"
    assert spans[0]["description"] == "SET 'somekey1' '%s..." % (
        long_string[: 1024 - len("...") - len("SET 'somekey1' '")],
    )
    assert spans[1]["description"] == "SET 'somekey2' '%s'" % (short_string,)


def test_data_truncation_custom(sentry_init, capture_events):
    sentry_init(
        integrations=[RedisIntegration(max_data_size=30)],
        traces_sample_rate=1.0,
        send_default_pii=True,
    )
    events = capture_events()

    connection = FakeStrictRedis()
    with start_transaction():
        long_string = "a" * 100000
        connection.set("somekey1", long_string)
        short_string = "b" * 10
        connection.set("somekey2", short_string)

    (event,) = events
    spans = event["spans"]
    assert spans[0]["op"] == "db.redis"
    assert spans[0]["description"] == "SET 'somekey1' '%s..." % (
        long_string[: 30 - len("...") - len("SET 'somekey1' '")],
    )
    assert spans[1]["description"] == "SET 'somekey2' '%s'" % (short_string,)


def test_breadcrumbs(sentry_init, capture_events):
    sentry_init(
        integrations=[RedisIntegration(max_data_size=30)],
        send_default_pii=True,
    )
    events = capture_events()

    connection = FakeStrictRedis()

    long_string = "a" * 100000
    connection.set("somekey1", long_string)
    short_string = "b" * 10
    connection.set("somekey2", short_string)

    capture_message("hi")

    (event,) = events
    crumbs = event["breadcrumbs"]["values"]

    assert crumbs[0] == {
        "message": "SET 'somekey1' 'aaaaaaaaaaa...",
        "type": "redis",
        "category": "redis",
        "data": {
            "db.operation": "SET",
            "redis.is_cluster": False,
            "redis.command": "SET",
            "redis.key": "somekey1",
        },
        "timestamp": crumbs[0]["timestamp"],
    }
    assert crumbs[1] == {
        "message": "SET 'somekey2' 'bbbbbbbbbb'",
        "type": "redis",
        "category": "redis",
        "data": {
            "db.operation": "SET",
            "redis.is_cluster": False,
            "redis.command": "SET",
            "redis.key": "somekey2",
        },
        "timestamp": crumbs[1]["timestamp"],
    }


def test_db_connection_attributes_client(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[RedisIntegration()],
    )
    events = capture_events()

    with start_transaction():
        connection = FakeStrictRedis(connection_pool=MOCK_CONNECTION_POOL)
        connection.get("foobar")

    (event,) = events
    (span,) = event["spans"]

    assert span["op"] == "db.redis"
    assert span["description"] == "GET 'foobar'"
    assert span["data"][SPANDATA.DB_SYSTEM] == "redis"
    assert span["data"][SPANDATA.DB_NAME] == "1"
    assert span["data"][SPANDATA.SERVER_ADDRESS] == "localhost"
    assert span["data"][SPANDATA.SERVER_PORT] == 63791


def test_db_connection_attributes_pipeline(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[RedisIntegration()],
    )
    events = capture_events()

    with start_transaction():
        connection = FakeStrictRedis(connection_pool=MOCK_CONNECTION_POOL)
        pipeline = connection.pipeline(transaction=False)
        pipeline.get("foo")
        pipeline.set("bar", 1)
        pipeline.set("baz", 2)
        pipeline.execute()

    (event,) = events
    (span,) = event["spans"]

    assert span["op"] == "db.redis"
    assert span["description"] == "redis.pipeline.execute"
    assert span["data"][SPANDATA.DB_SYSTEM] == "redis"
    assert span["data"][SPANDATA.DB_NAME] == "1"
    assert span["data"][SPANDATA.SERVER_ADDRESS] == "localhost"
    assert span["data"][SPANDATA.SERVER_PORT] == 63791


def test_span_origin(sentry_init, capture_events):
    sentry_init(
        integrations=[RedisIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    connection = FakeStrictRedis()
    with start_transaction(name="custom_transaction"):
        # default case
        connection.set("somekey", "somevalue")

        # pipeline
        pipeline = connection.pipeline(transaction=False)
        pipeline.get("somekey")
        pipeline.set("anotherkey", 1)
        pipeline.execute()

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"

    for span in event["spans"]:
        assert span["origin"] == "auto.db.redis"
sentry-python-2.18.0/tests/integrations/redis/test_redis_cache_module.py000066400000000000000000000233021471214654000266360ustar00rootroot00000000000000import uuid

import pytest

import fakeredis
from fakeredis import FakeStrictRedis

from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.integrations.redis.utils import _get_safe_key, _key_as_string
from sentry_sdk.utils import parse_version
import sentry_sdk


FAKEREDIS_VERSION = parse_version(fakeredis.__version__)


def test_no_cache_basic(sentry_init, capture_events):
    sentry_init(
        integrations=[
            RedisIntegration(),
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    connection = FakeStrictRedis()
    with sentry_sdk.start_transaction():
        connection.get("mycachekey")

    (event,) = events
    spans = event["spans"]
    assert len(spans) == 1
    assert spans[0]["op"] == "db.redis"


def test_cache_basic(sentry_init, capture_events):
    sentry_init(
        integrations=[
            RedisIntegration(
                cache_prefixes=["mycache"],
            ),
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    connection = FakeStrictRedis()
    with sentry_sdk.start_transaction():
        connection.hget("mycachekey", "myfield")
        connection.get("mycachekey")
        connection.set("mycachekey1", "bla")
        connection.setex("mycachekey2", 10, "blub")
        connection.mget("mycachekey1", "mycachekey2")

    (event,) = events
    spans = event["spans"]
    assert len(spans) == 9
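    # each cache-prefixed command emits a cache.* span plus its db.redis span;
    # HGET is not an instrumented cache command and gets only a db span,
    # so 1 + 4 * 2 = 9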

    # no cache support for hget command
    assert spans[0]["op"] == "db.redis"
    assert spans[0]["tags"]["redis.command"] == "HGET"

    assert spans[1]["op"] == "cache.get"
    assert spans[2]["op"] == "db.redis"
    assert spans[2]["tags"]["redis.command"] == "GET"

    assert spans[3]["op"] == "cache.put"
    assert spans[4]["op"] == "db.redis"
    assert spans[4]["tags"]["redis.command"] == "SET"

    assert spans[5]["op"] == "cache.put"
    assert spans[6]["op"] == "db.redis"
    assert spans[6]["tags"]["redis.command"] == "SETEX"

    assert spans[7]["op"] == "cache.get"
    assert spans[8]["op"] == "db.redis"
    assert spans[8]["tags"]["redis.command"] == "MGET"


def test_cache_keys(sentry_init, capture_events):
    sentry_init(
        integrations=[
            RedisIntegration(
                cache_prefixes=["bla", "blub"],
            ),
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    connection = FakeStrictRedis()
    with sentry_sdk.start_transaction():
        connection.get("somethingelse")
        connection.get("blub")
        connection.get("blubkeything")
        connection.get("bl")

    (event,) = events
    spans = event["spans"]
    assert len(spans) == 6
    assert spans[0]["op"] == "db.redis"
    assert spans[0]["description"] == "GET 'somethingelse'"

    assert spans[1]["op"] == "cache.get"
    assert spans[1]["description"] == "blub"
    assert spans[2]["op"] == "db.redis"
    assert spans[2]["description"] == "GET 'blub'"

    assert spans[3]["op"] == "cache.get"
    assert spans[3]["description"] == "blubkeything"
    assert spans[4]["op"] == "db.redis"
    assert spans[4]["description"] == "GET 'blubkeything'"

    assert spans[5]["op"] == "db.redis"
    assert spans[5]["description"] == "GET 'bl'"


def test_cache_data(sentry_init, capture_events):
    sentry_init(
        integrations=[
            RedisIntegration(
                cache_prefixes=["mycache"],
            ),
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    connection = FakeStrictRedis(host="mycacheserver.io", port=6378)
    with sentry_sdk.start_transaction():
        connection.get("mycachekey")
        connection.set("mycachekey", "事实胜于雄辩")
        connection.get("mycachekey")

    (event,) = events
    spans = event["spans"]

    assert len(spans) == 6
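    # three commands, each producing one cache span and one db.redis span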

    assert spans[0]["op"] == "cache.get"
    assert spans[0]["description"] == "mycachekey"
    assert spans[0]["data"]["cache.key"] == [
        "mycachekey",
    ]
    assert spans[0]["data"]["cache.hit"] == False  # noqa: E712
    assert "cache.item_size" not in spans[0]["data"]
    # Very old fakeredis cannot handle port and/or host
    # (only applicable for Redis v3).
    if FAKEREDIS_VERSION <= (2, 7, 1):
        assert "network.peer.port" not in spans[0]["data"]
    else:
        assert spans[0]["data"]["network.peer.port"] == 6378
    if FAKEREDIS_VERSION <= (1, 7, 1):
        assert "network.peer.address" not in spans[0]["data"]
    else:
        assert spans[0]["data"]["network.peer.address"] == "mycacheserver.io"

    assert spans[1]["op"] == "db.redis"  # we ignore db spans in this test.

    assert spans[2]["op"] == "cache.put"
    assert spans[2]["description"] == "mycachekey"
    assert spans[2]["data"]["cache.key"] == [
        "mycachekey",
    ]
    assert "cache.hit" not in spans[1]["data"]
    assert spans[2]["data"]["cache.item_size"] == 18
    # very old fakeredis can not handle port.
    # only used with redis v3
    if FAKEREDIS_VERSION <= (2, 7, 1):
        assert "network.peer.port" not in spans[2]["data"]
    else:
        assert spans[2]["data"]["network.peer.port"] == 6378
    if FAKEREDIS_VERSION <= (1, 7, 1):
        assert "network.peer.address" not in spans[2]["data"]
    else:
        assert spans[2]["data"]["network.peer.address"] == "mycacheserver.io"

    assert spans[3]["op"] == "db.redis"  # we ignore db spans in this test.

    assert spans[4]["op"] == "cache.get"
    assert spans[4]["description"] == "mycachekey"
    assert spans[4]["data"]["cache.key"] == [
        "mycachekey",
    ]
    assert spans[4]["data"]["cache.hit"] == True  # noqa: E712
    assert spans[4]["data"]["cache.item_size"] == 18
    # Very old fakeredis cannot handle port
    # (only applicable for Redis v3).
    if FAKEREDIS_VERSION <= (2, 7, 1):
        assert "network.peer.port" not in spans[4]["data"]
    else:
        assert spans[4]["data"]["network.peer.port"] == 6378
    if FAKEREDIS_VERSION <= (1, 7, 1):
        assert "network.peer.address" not in spans[4]["data"]
    else:
        assert spans[4]["data"]["network.peer.address"] == "mycacheserver.io"

    assert spans[5]["op"] == "db.redis"  # we ignore db spans in this test.


def test_cache_prefixes(sentry_init, capture_events):
    sentry_init(
        integrations=[
            RedisIntegration(
                cache_prefixes=["yes"],
            ),
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    connection = FakeStrictRedis()
    with sentry_sdk.start_transaction():
        connection.mget("yes", "no")
        connection.mget("no", 1, "yes")
        connection.mget("no", "yes.1", "yes.2")
        connection.mget("no.1", "no.2", "no.3")
        connection.mget("no.1", "no.2", "no.actually.yes")
        connection.mget(b"no.3", b"yes.5")
        connection.mget(uuid.uuid4().bytes)
        connection.mget(uuid.uuid4().bytes, "yes")

    (event,) = events

    spans = event["spans"]
    assert len(spans) == 13  # 8 db spans + 5 cache spans

    cache_spans = [span for span in spans if span["op"] == "cache.get"]
    assert len(cache_spans) == 5

    assert cache_spans[0]["description"] == "yes, no"
    assert cache_spans[1]["description"] == "no, 1, yes"
    assert cache_spans[2]["description"] == "no, yes.1, yes.2"
    assert cache_spans[3]["description"] == "no.3, yes.5"
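    # the raw uuid4 bytes key is not decodable, so _key_as_string renders it
    # as an empty string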
    assert cache_spans[4]["description"] == ", yes"


@pytest.mark.parametrize(
    "method_name,args,kwargs,expected_key",
    [
        (None, None, None, None),
        ("", None, None, None),
        ("set", ["bla", "valuebla"], None, ("bla",)),
        ("setex", ["bla", 10, "valuebla"], None, ("bla",)),
        ("get", ["bla"], None, ("bla",)),
        ("mget", ["bla", "blub", "foo"], None, ("bla", "blub", "foo")),
        ("set", [b"bla", "valuebla"], None, (b"bla",)),
        ("setex", [b"bla", 10, "valuebla"], None, (b"bla",)),
        ("get", [b"bla"], None, (b"bla",)),
        ("mget", [b"bla", "blub", "foo"], None, (b"bla", "blub", "foo")),
        ("not-important", None, {"something": "bla"}, None),
        ("not-important", None, {"key": None}, None),
        ("not-important", None, {"key": "bla"}, ("bla",)),
        ("not-important", None, {"key": b"bla"}, (b"bla",)),
        ("not-important", None, {"key": []}, None),
        (
            "not-important",
            None,
            {
                "key": [
                    "bla",
                ]
            },
            ("bla",),
        ),
        (
            "not-important",
            None,
            {"key": [b"bla", "blub", "foo"]},
            (b"bla", "blub", "foo"),
        ),
        (
            "not-important",
            None,
            {"key": b"\x00c\x0f\xeaC\xe1L\x1c\xbff\xcb\xcc\xc1\xed\xc6\t"},
            (b"\x00c\x0f\xeaC\xe1L\x1c\xbff\xcb\xcc\xc1\xed\xc6\t",),
        ),
        (
            "get",
            [b"\x00c\x0f\xeaC\xe1L\x1c\xbff\xcb\xcc\xc1\xed\xc6\t"],
            None,
            (b"\x00c\x0f\xeaC\xe1L\x1c\xbff\xcb\xcc\xc1\xed\xc6\t",),
        ),
        (
            "get",
            [123],
            None,
            (123,),
        ),
    ],
)
def test_get_safe_key(method_name, args, kwargs, expected_key):
    assert _get_safe_key(method_name, args, kwargs) == expected_key


@pytest.mark.parametrize(
    "key,expected_key",
    [
        (None, ""),
        (("bla",), "bla"),
        (("bla", "blub", "foo"), "bla, blub, foo"),
        ((b"bla",), "bla"),
        ((b"bla", "blub", "foo"), "bla, blub, foo"),
        (
            [
                "bla",
            ],
            "bla",
        ),
        (["bla", "blub", "foo"], "bla, blub, foo"),
        ([uuid.uuid4().bytes], ""),
        ({"key1": 1, "key2": 2}, "key1, key2"),
        (1, "1"),
        ([1, 2, 3, b"hello"], "1, 2, 3, hello"),
    ],
)
def test_key_as_string(key, expected_key):
    assert _key_as_string(key) == expected_key
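

# A small composition sketch (illustrative): the integration chains these two
# helpers, using _get_safe_key to extract the key(s) from a command's
# args/kwargs and _key_as_string to render them for span data.
def test_get_safe_key_composes_with_key_as_string():
    key = _get_safe_key("mget", ["bla", b"blub"], None)
    assert _key_as_string(key) == "bla, blub"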
sentry-python-2.18.0/tests/integrations/redis/test_redis_cache_module_async.py000066400000000000000000000132251471214654000300360ustar00rootroot00000000000000import pytest

try:
    import fakeredis
    from fakeredis.aioredis import FakeRedis as FakeRedisAsync
except ModuleNotFoundError:
    FakeRedisAsync = None

if FakeRedisAsync is None:
    pytest.skip(
        "Skipping tests because fakeredis.aioredis not available",
        allow_module_level=True,
    )

from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.utils import parse_version
import sentry_sdk


FAKEREDIS_VERSION = parse_version(fakeredis.__version__)


@pytest.mark.asyncio
async def test_no_cache_basic(sentry_init, capture_events):
    sentry_init(
        integrations=[
            RedisIntegration(),
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    connection = FakeRedisAsync()
    with sentry_sdk.start_transaction():
        await connection.get("myasynccachekey")

    (event,) = events
    spans = event["spans"]
    assert len(spans) == 1
    assert spans[0]["op"] == "db.redis"


@pytest.mark.asyncio
async def test_cache_basic(sentry_init, capture_events):
    sentry_init(
        integrations=[
            RedisIntegration(
                cache_prefixes=["myasynccache"],
            ),
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    connection = FakeRedisAsync()
    with sentry_sdk.start_transaction():
        await connection.get("myasynccachekey")

    (event,) = events
    spans = event["spans"]
    assert len(spans) == 2

    assert spans[0]["op"] == "cache.get"
    assert spans[1]["op"] == "db.redis"


@pytest.mark.asyncio
async def test_cache_keys(sentry_init, capture_events):
    sentry_init(
        integrations=[
            RedisIntegration(
                cache_prefixes=["abla", "ablub"],
            ),
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    connection = FakeRedisAsync()
    with sentry_sdk.start_transaction():
        await connection.get("asomethingelse")
        await connection.get("ablub")
        await connection.get("ablubkeything")
        await connection.get("abl")

    (event,) = events
    spans = event["spans"]
    assert len(spans) == 6
    assert spans[0]["op"] == "db.redis"
    assert spans[0]["description"] == "GET 'asomethingelse'"

    assert spans[1]["op"] == "cache.get"
    assert spans[1]["description"] == "ablub"
    assert spans[2]["op"] == "db.redis"
    assert spans[2]["description"] == "GET 'ablub'"

    assert spans[3]["op"] == "cache.get"
    assert spans[3]["description"] == "ablubkeything"
    assert spans[4]["op"] == "db.redis"
    assert spans[4]["description"] == "GET 'ablubkeything'"

    assert spans[5]["op"] == "db.redis"
    assert spans[5]["description"] == "GET 'abl'"


@pytest.mark.asyncio
async def test_cache_data(sentry_init, capture_events):
    sentry_init(
        integrations=[
            RedisIntegration(
                cache_prefixes=["myasynccache"],
            ),
        ],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    connection = FakeRedisAsync(host="mycacheserver.io", port=6378)
    with sentry_sdk.start_transaction():
        await connection.get("myasynccachekey")
        await connection.set("myasynccachekey", "事实胜于雄辩")
        await connection.get("myasynccachekey")

    (event,) = events
    spans = event["spans"]

    assert len(spans) == 6

    assert spans[0]["op"] == "cache.get"
    assert spans[0]["description"] == "myasynccachekey"
    assert spans[0]["data"]["cache.key"] == [
        "myasynccachekey",
    ]
    assert spans[0]["data"]["cache.hit"] == False  # noqa: E712
    assert "cache.item_size" not in spans[0]["data"]
    # Very old fakeredis cannot handle port and/or host
    # (only applicable for Redis v3).
    if FAKEREDIS_VERSION <= (2, 7, 1):
        assert "network.peer.port" not in spans[0]["data"]
    else:
        assert spans[0]["data"]["network.peer.port"] == 6378
    if FAKEREDIS_VERSION <= (1, 7, 1):
        assert "network.peer.address" not in spans[0]["data"]
    else:
        assert spans[0]["data"]["network.peer.address"] == "mycacheserver.io"

    assert spans[1]["op"] == "db.redis"  # we ignore db spans in this test.

    assert spans[2]["op"] == "cache.put"
    assert spans[2]["description"] == "myasynccachekey"
    assert spans[2]["data"]["cache.key"] == [
        "myasynccachekey",
    ]
    assert "cache.hit" not in spans[1]["data"]
    assert spans[2]["data"]["cache.item_size"] == 18
    # very old fakeredis can not handle port.
    # only used with redis v3
    if FAKEREDIS_VERSION <= (2, 7, 1):
        assert "network.peer.port" not in spans[2]["data"]
    else:
        assert spans[2]["data"]["network.peer.port"] == 6378
    if FAKEREDIS_VERSION <= (1, 7, 1):
        assert "network.peer.address" not in spans[2]["data"]
    else:
        assert spans[2]["data"]["network.peer.address"] == "mycacheserver.io"

    assert spans[3]["op"] == "db.redis"  # we ignore db spans in this test.

    assert spans[4]["op"] == "cache.get"
    assert spans[4]["description"] == "myasynccachekey"
    assert spans[4]["data"]["cache.key"] == [
        "myasynccachekey",
    ]
    assert spans[4]["data"]["cache.hit"] == True  # noqa: E712
    assert spans[4]["data"]["cache.item_size"] == 18
    # Very old fakeredis cannot handle port
    # (only applicable for Redis v3).
    if FAKEREDIS_VERSION <= (2, 7, 1):
        assert "network.peer.port" not in spans[4]["data"]
    else:
        assert spans[4]["data"]["network.peer.port"] == 6378
    if FAKEREDIS_VERSION <= (1, 7, 1):
        assert "network.peer.address" not in spans[4]["data"]
    else:
        assert spans[4]["data"]["network.peer.address"] == "mycacheserver.io"

    assert spans[5]["op"] == "db.redis"  # we ignore db spans in this test.
sentry-python-2.18.0/tests/integrations/redis_py_cluster_legacy/000077500000000000000000000000001471214654000252245ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/redis_py_cluster_legacy/__init__.py000066400000000000000000000000631471214654000273340ustar00rootroot00000000000000import pytest

pytest.importorskip("rediscluster")
sentry-python-2.18.0/tests/integrations/redis_py_cluster_legacy/test_redis_py_cluster_legacy.py000066400000000000000000000115521471214654000335440ustar00rootroot00000000000000from unittest import mock

import pytest
import rediscluster

from sentry_sdk import capture_message
from sentry_sdk.api import start_transaction
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.redis import RedisIntegration
from tests.conftest import ApproxDict
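# ApproxDict (defined in tests/conftest.py) compares equal when the actual
# dict contains at least the given items; extra keys are ignored.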


MOCK_CONNECTION_POOL = mock.MagicMock()
MOCK_CONNECTION_POOL.connection_kwargs = {
    "host": "localhost",
    "port": 63791,
    "db": 1,
}
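
# The Redis integration reads host/port/db from the pool's connection_kwargs
# to populate the SERVER_ADDRESS, SERVER_PORT and DB_NAME span data, so a
# MagicMock carrying only this attribute is enough for these tests.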


rediscluster_classes = [rediscluster.RedisCluster]

if hasattr(rediscluster, "StrictRedisCluster"):
    rediscluster_classes.append(rediscluster.StrictRedisCluster)


@pytest.fixture(autouse=True)
def monkeypatch_rediscluster_classes(reset_integrations):
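    # Stub out the network-facing methods so no real Redis cluster is needed;
    # only the integration's instrumentation wrappers are exercised.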
    try:
        pipeline_cls = rediscluster.pipeline.ClusterPipeline
    except AttributeError:
        pipeline_cls = rediscluster.StrictClusterPipeline
    rediscluster.RedisCluster.pipeline = lambda *_, **__: pipeline_cls(
        connection_pool=MOCK_CONNECTION_POOL
    )
    pipeline_cls.execute = lambda *_, **__: None
    for cls in rediscluster_classes:
        cls.execute_command = lambda *_, **__: None


@pytest.mark.parametrize("rediscluster_cls", rediscluster_classes)
def test_rediscluster_basic(rediscluster_cls, sentry_init, capture_events):
    sentry_init(integrations=[RedisIntegration()])
    events = capture_events()

    rc = rediscluster_cls(connection_pool=MOCK_CONNECTION_POOL)
    rc.get("foobar")
    capture_message("hi")

    (event,) = events
    (crumb,) = event["breadcrumbs"]["values"]

    assert crumb == {
        "category": "redis",
        "message": "GET 'foobar'",
        "data": ApproxDict(
            {
                "db.operation": "GET",
                "redis.key": "foobar",
                "redis.command": "GET",
                "redis.is_cluster": True,
            }
        ),
        "timestamp": crumb["timestamp"],
        "type": "redis",
    }


@pytest.mark.parametrize(
    "send_default_pii, expected_first_ten",
    [
        (False, ["GET 'foo'", "SET 'bar' [Filtered]", "SET 'baz' [Filtered]"]),
        (True, ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"]),
    ],
)
def test_rediscluster_pipeline(
    sentry_init, capture_events, send_default_pii, expected_first_ten
):
    sentry_init(
        integrations=[RedisIntegration()],
        traces_sample_rate=1.0,
        send_default_pii=send_default_pii,
    )
    events = capture_events()

    rc = rediscluster.RedisCluster(connection_pool=MOCK_CONNECTION_POOL)
    with start_transaction():
        pipeline = rc.pipeline()
        pipeline.get("foo")
        pipeline.set("bar", 1)
        pipeline.set("baz", 2)
        pipeline.execute()

    (event,) = events
    (span,) = event["spans"]
    assert span["op"] == "db.redis"
    assert span["description"] == "redis.pipeline.execute"
    assert span["data"] == ApproxDict(
        {
            "redis.commands": {
                "count": 3,
                "first_ten": expected_first_ten,
            },
            SPANDATA.DB_SYSTEM: "redis",
            SPANDATA.DB_NAME: "1",
            SPANDATA.SERVER_ADDRESS: "localhost",
            SPANDATA.SERVER_PORT: 63791,
        }
    )
    assert span["tags"] == {
        "redis.transaction": False,  # For Cluster, this is always False
        "redis.is_cluster": True,
    }


@pytest.mark.parametrize("rediscluster_cls", rediscluster_classes)
def test_db_connection_attributes_client(sentry_init, capture_events, rediscluster_cls):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[RedisIntegration()],
    )
    events = capture_events()

    rc = rediscluster_cls(connection_pool=MOCK_CONNECTION_POOL)
    with start_transaction():
        rc.get("foobar")

    (event,) = events
    (span,) = event["spans"]

    assert span["data"] == ApproxDict(
        {
            SPANDATA.DB_SYSTEM: "redis",
            SPANDATA.DB_NAME: "1",
            SPANDATA.SERVER_ADDRESS: "localhost",
            SPANDATA.SERVER_PORT: 63791,
        }
    )


@pytest.mark.parametrize("rediscluster_cls", rediscluster_classes)
def test_db_connection_attributes_pipeline(
    sentry_init, capture_events, rediscluster_cls
):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[RedisIntegration()],
    )
    events = capture_events()

    rc = rediscluster.RedisCluster(connection_pool=MOCK_CONNECTION_POOL)
    with start_transaction():
        pipeline = rc.pipeline()
        pipeline.get("foo")
        pipeline.execute()

    (event,) = events
    (span,) = event["spans"]
    assert span["op"] == "db.redis"
    assert span["description"] == "redis.pipeline.execute"
    assert span["data"] == ApproxDict(
        {
            "redis.commands": {
                "count": 1,
                "first_ten": ["GET 'foo'"],
            },
            SPANDATA.DB_SYSTEM: "redis",
            SPANDATA.DB_NAME: "1",
            SPANDATA.SERVER_ADDRESS: "localhost",
            SPANDATA.SERVER_PORT: 63791,
        }
    )
sentry-python-2.18.0/tests/integrations/requests/000077500000000000000000000000001471214654000221745ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/requests/__init__.py000066400000000000000000000000571471214654000243070ustar00rootroot00000000000000import pytest

pytest.importorskip("requests")
sentry-python-2.18.0/tests/integrations/requests/test_requests.py000066400000000000000000000036671471214654000254740ustar00rootroot00000000000000from unittest import mock

import pytest
import requests
import responses

from sentry_sdk import capture_message
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.stdlib import StdlibIntegration
from tests.conftest import ApproxDict


def test_crumb_capture(sentry_init, capture_events):
    sentry_init(integrations=[StdlibIntegration()])

    url = "http://example.com/"
    responses.add(responses.GET, url, status=200)
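    # Note: responses is not activated here, so the request below actually
    # goes over the wire; this is presumably deliberate, since the stdlib
    # integration hooks http.client, which responses would bypass.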

    events = capture_events()

    response = requests.get(url)
    capture_message("Testing!")

    (event,) = events
    (crumb,) = event["breadcrumbs"]["values"]
    assert crumb["type"] == "http"
    assert crumb["category"] == "httplib"
    assert crumb["data"] == ApproxDict(
        {
            "url": url,
            SPANDATA.HTTP_METHOD: "GET",
            SPANDATA.HTTP_FRAGMENT: "",
            SPANDATA.HTTP_QUERY: "",
            SPANDATA.HTTP_STATUS_CODE: response.status_code,
            "reason": response.reason,
        }
    )


@pytest.mark.tests_internal_exceptions
def test_omit_url_data_if_parsing_fails(sentry_init, capture_events):
    sentry_init(integrations=[StdlibIntegration()])

    url = "https://example.com"
    responses.add(responses.GET, url, status=200)

    events = capture_events()

    with mock.patch(
        "sentry_sdk.integrations.stdlib.parse_url",
        side_effect=ValueError,
    ):
        response = requests.get(url)

    capture_message("Testing!")

    (event,) = events
    assert event["breadcrumbs"]["values"][0]["data"] == ApproxDict(
        {
            SPANDATA.HTTP_METHOD: "GET",
            SPANDATA.HTTP_STATUS_CODE: response.status_code,
            "reason": response.reason,
            # no url related data
        }
    )

    assert "url" not in event["breadcrumbs"]["values"][0]["data"]
    assert SPANDATA.HTTP_FRAGMENT not in event["breadcrumbs"]["values"][0]["data"]
    assert SPANDATA.HTTP_QUERY not in event["breadcrumbs"]["values"][0]["data"]
sentry-python-2.18.0/tests/integrations/rq/000077500000000000000000000000001471214654000207435ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/rq/__init__.py000066400000000000000000000000511471214654000230500ustar00rootroot00000000000000import pytest

pytest.importorskip("rq")
sentry-python-2.18.0/tests/integrations/rq/test_rq.py000066400000000000000000000216621471214654000230050ustar00rootroot00000000000000from unittest import mock

import pytest
import rq
from fakeredis import FakeStrictRedis

import sentry_sdk
from sentry_sdk import start_transaction
from sentry_sdk.integrations.rq import RqIntegration
from sentry_sdk.utils import parse_version


@pytest.fixture(autouse=True)
def _patch_rq_get_server_version(monkeypatch):
    """
    Patch RQ 1.5.1 and below to work with fakeredis.

    https://github.com/jamesls/fakeredis/issues/273
    """
    try:
        from distutils.version import StrictVersion
    except ImportError:
        return

    if parse_version(rq.VERSION) <= (1, 5, 1):
        for k in (
            "rq.job.Job.get_redis_server_version",
            "rq.worker.Worker.get_redis_server_version",
        ):
            try:
                monkeypatch.setattr(k, lambda _: StrictVersion("4.0.0"))
            except AttributeError:
                # old RQ Job/Worker doesn't have a get_redis_server_version attr
                pass


def crashing_job(foo):
    1 / 0


def chew_up_shoes(dog, human, shoes):
    raise Exception("{}!! Why did you eat {}'s {}??".format(dog, human, shoes))


def do_trick(dog, trick):
    return "{}, can you {}? Good dog!".format(dog, trick)


def test_basic(sentry_init, capture_events):
    sentry_init(integrations=[RqIntegration()])
    events = capture_events()

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)

    queue.enqueue(crashing_job, foo=42)
    worker.work(burst=True)

    (event,) = events

    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    assert exception["mechanism"]["type"] == "rq"
    assert exception["stacktrace"]["frames"][-1]["vars"]["foo"] == "42"

    assert event["transaction"] == "tests.integrations.rq.test_rq.crashing_job"

    extra = event["extra"]["rq-job"]
    assert extra["args"] == []
    assert extra["kwargs"] == {"foo": 42}
    assert extra["description"] == "tests.integrations.rq.test_rq.crashing_job(foo=42)"
    assert extra["func"] == "tests.integrations.rq.test_rq.crashing_job"
    assert "job_id" in extra
    assert "enqueued_at" in extra

    # older versions don't persist started_at correctly
    if parse_version(rq.VERSION) >= (0, 9):
        assert "started_at" in extra


def test_transport_shutdown(sentry_init, capture_events_forksafe):
    sentry_init(integrations=[RqIntegration()])

    events = capture_events_forksafe()

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.Worker([queue], connection=queue.connection)

    queue.enqueue(crashing_job, foo=42)
    worker.work(burst=True)

    event = events.read_event()
    events.read_flush()

    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"


def test_transaction_with_error(
    sentry_init, capture_events, DictionaryContaining  # noqa:N803
):
    sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)

    queue.enqueue(chew_up_shoes, "Charlie", "Katie", shoes="flip-flops")
    worker.work(burst=True)

    error_event, envelope = events

    assert error_event["transaction"] == "tests.integrations.rq.test_rq.chew_up_shoes"
    assert error_event["contexts"]["trace"]["op"] == "queue.task.rq"
    assert error_event["exception"]["values"][0]["type"] == "Exception"
    assert (
        error_event["exception"]["values"][0]["value"]
        == "Charlie!! Why did you eat Katie's flip-flops??"
    )

    assert envelope["type"] == "transaction"
    assert envelope["contexts"]["trace"] == error_event["contexts"]["trace"]
    assert envelope["transaction"] == error_event["transaction"]
    assert envelope["extra"]["rq-job"] == DictionaryContaining(
        {
            "args": ["Charlie", "Katie"],
            "kwargs": {"shoes": "flip-flops"},
            "func": "tests.integrations.rq.test_rq.chew_up_shoes",
            "description": "tests.integrations.rq.test_rq.chew_up_shoes('Charlie', 'Katie', shoes='flip-flops')",
        }
    )


def test_error_has_trace_context_if_tracing_disabled(
    sentry_init,
    capture_events,
):
    sentry_init(integrations=[RqIntegration()])
    events = capture_events()

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)

    queue.enqueue(crashing_job, foo=None)
    worker.work(burst=True)

    (error_event,) = events

    assert error_event["contexts"]["trace"]


def test_tracing_enabled(
    sentry_init,
    capture_events,
):
    sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)

    with start_transaction(op="rq transaction") as transaction:
        queue.enqueue(crashing_job, foo=None)
        worker.work(burst=True)

    error_event, envelope, _ = events

    assert error_event["transaction"] == "tests.integrations.rq.test_rq.crashing_job"
    assert error_event["contexts"]["trace"]["trace_id"] == transaction.trace_id

    assert envelope["contexts"]["trace"] == error_event["contexts"]["trace"]


def test_tracing_disabled(
    sentry_init,
    capture_events,
):
    sentry_init(integrations=[RqIntegration()])
    events = capture_events()

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)

    scope = sentry_sdk.get_isolation_scope()
    queue.enqueue(crashing_job, foo=None)
    worker.work(burst=True)

    (error_event,) = events

    assert error_event["transaction"] == "tests.integrations.rq.test_rq.crashing_job"
    assert (
        error_event["contexts"]["trace"]["trace_id"]
        == scope._propagation_context.trace_id
    )


def test_transaction_no_error(
    sentry_init, capture_events, DictionaryContaining  # noqa:N803
):
    sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)

    queue.enqueue(do_trick, "Maisey", trick="kangaroo")
    worker.work(burst=True)

    envelope = events[0]

    assert envelope["type"] == "transaction"
    assert envelope["contexts"]["trace"]["op"] == "queue.task.rq"
    assert envelope["transaction"] == "tests.integrations.rq.test_rq.do_trick"
    assert envelope["extra"]["rq-job"] == DictionaryContaining(
        {
            "args": ["Maisey"],
            "kwargs": {"trick": "kangaroo"},
            "func": "tests.integrations.rq.test_rq.do_trick",
            "description": "tests.integrations.rq.test_rq.do_trick('Maisey', trick='kangaroo')",
        }
    )


def test_traces_sampler_gets_correct_values_in_sampling_context(
    sentry_init, DictionaryContaining, ObjectDescribedBy  # noqa:N803
):
    traces_sampler = mock.Mock(return_value=True)
    sentry_init(integrations=[RqIntegration()], traces_sampler=traces_sampler)

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)

    queue.enqueue(do_trick, "Bodhi", trick="roll over")
    worker.work(burst=True)

    traces_sampler.assert_any_call(
        DictionaryContaining(
            {
                "rq_job": ObjectDescribedBy(
                    type=rq.job.Job,
                    attrs={
                        "description": "tests.integrations.rq.test_rq.do_trick('Bodhi', trick='roll over')",
                        "result": "Bodhi, can you roll over? Good dog!",
                        "func_name": "tests.integrations.rq.test_rq.do_trick",
                        "args": ("Bodhi",),
                        "kwargs": {"trick": "roll over"},
                    },
                ),
            }
        )
    )


@pytest.mark.skipif(
    parse_version(rq.__version__) < (1, 5), reason="At least rq-1.5 required"
)
@pytest.mark.skipif(
    parse_version(rq.__version__) >= (2,),
    reason="Test broke in RQ 2.0. Investigate and fix. "
    "See https://github.com/getsentry/sentry-python/issues/3707.",
)
def test_job_with_retries(sentry_init, capture_events):
    sentry_init(integrations=[RqIntegration()])
    events = capture_events()

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)

    queue.enqueue(crashing_job, foo=42, retry=rq.Retry(max=1))
    worker.work(burst=True)

    assert len(events) == 1


def test_span_origin(sentry_init, capture_events):
    sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)

    queue.enqueue(do_trick, "Maisey", trick="kangaroo")
    worker.work(burst=True)

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "auto.queue.rq"
sentry-python-2.18.0/tests/integrations/sanic/000077500000000000000000000000001471214654000214165ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/sanic/__init__.py000066400000000000000000000000541471214654000235260ustar00rootroot00000000000000import pytest

pytest.importorskip("sanic")
sentry-python-2.18.0/tests/integrations/sanic/test_sanic.py000066400000000000000000000332131471214654000241260ustar00rootroot00000000000000import asyncio
import contextlib
import os
import random
import sys
from unittest.mock import Mock

import pytest

import sentry_sdk
from sentry_sdk import capture_message
from sentry_sdk.integrations.sanic import SanicIntegration
from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, TRANSACTION_SOURCE_URL

from sanic import Sanic, request, response, __version__ as SANIC_VERSION_RAW
from sanic.response import HTTPResponse
from sanic.exceptions import SanicException

try:
    from sanic_testing import TestManager
except ImportError:
    TestManager = None

try:
    from sanic_testing.reusable import ReusableClient
except ImportError:
    ReusableClient = None

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from collections.abc import Iterable, Container
    from typing import Any, Optional

SANIC_VERSION = tuple(map(int, SANIC_VERSION_RAW.split(".")))
PERFORMANCE_SUPPORTED = SANIC_VERSION >= (21, 9)


@pytest.fixture
def app():
    if SANIC_VERSION < (19,):
        """
        Older Sanic versions 0.8 and 18 bind to the same fixed port, which
        creates problems when we run tests concurrently.
        """
        old_test_client = Sanic.test_client.__get__

        def new_test_client(self):
            client = old_test_client(self, Sanic)
            client.port += os.getpid() % 100
            return client

        Sanic.test_client = property(new_test_client)

    if SANIC_VERSION >= (20, 12) and SANIC_VERSION < (22, 6):
        # Some versions (introduced in 20.12.0 and removed again in 22.6.0)
        # store the app instance in an internal class registry for later
        # retrieval, so pass register=False to disable that.
        sanic_app = Sanic("Test", register=False)
    else:
        sanic_app = Sanic("Test")

    if TestManager is not None:
        TestManager(sanic_app)

    @sanic_app.route("/message")
    def hi(request):
        capture_message("hi")
        return response.text("ok")

    @sanic_app.route("/message/")
    def hi_with_id(request, message_id):
        capture_message("hi with id")
        return response.text("ok with id")

    @sanic_app.route("/500")
    def fivehundred(_):
        1 / 0

    return sanic_app


def get_client(app):
    @contextlib.contextmanager
    def simple_client(app):
        yield app.test_client

    if ReusableClient is not None:
        return ReusableClient(app)
    else:
        return simple_client(app)


def test_request_data(sentry_init, app, capture_events):
    sentry_init(integrations=[SanicIntegration()])
    events = capture_events()

    c = get_client(app)
    with c as client:
        _, response = client.get("/message?foo=bar")
        assert response.status == 200

    (event,) = events
    assert event["transaction"] == "hi"
    assert event["request"]["env"] == {"REMOTE_ADDR": ""}
    assert set(event["request"]["headers"]) >= {
        "accept",
        "accept-encoding",
        "host",
        "user-agent",
    }
    assert event["request"]["query_string"] == "foo=bar"
    assert event["request"]["url"].endswith("/message")
    assert event["request"]["method"] == "GET"

    # Assert that state is not leaked
    events.clear()
    capture_message("foo")
    (event,) = events

    assert "request" not in event
    assert "transaction" not in event


@pytest.mark.parametrize(
    "url,expected_transaction,expected_source",
    [
        ("/message", "hi", "component"),
        ("/message/123456", "hi_with_id", "component"),
    ],
)
def test_transaction_name(
    sentry_init, app, capture_events, url, expected_transaction, expected_source
):
    sentry_init(integrations=[SanicIntegration()])
    events = capture_events()

    c = get_client(app)
    with c as client:
        _, response = client.get(url)
        assert response.status == 200

    (event,) = events
    assert event["transaction"] == expected_transaction
    assert event["transaction_info"] == {"source": expected_source}


def test_errors(sentry_init, app, capture_events):
    sentry_init(integrations=[SanicIntegration()])
    events = capture_events()

    @app.route("/error")
    def myerror(request):
        raise ValueError("oh no")

    c = get_client(app)
    with c as client:
        _, response = client.get("/error")
        assert response.status == 500

    (event,) = events
    assert event["transaction"] == "myerror"
    (exception,) = event["exception"]["values"]

    assert exception["type"] == "ValueError"
    assert exception["value"] == "oh no"
    assert any(
        frame["filename"].endswith("test_sanic.py")
        for frame in exception["stacktrace"]["frames"]
    )


def test_bad_request_not_captured(sentry_init, app, capture_events):
    sentry_init(integrations=[SanicIntegration()])
    events = capture_events()

    @app.route("/")
    def index(request):
        raise SanicException("...", status_code=400)

    c = get_client(app)
    with c as client:
        _, response = client.get("/")
        assert response.status == 400

    assert not events


def test_error_in_errorhandler(sentry_init, app, capture_events):
    sentry_init(integrations=[SanicIntegration()])
    events = capture_events()

    @app.route("/error")
    def myerror(request):
        raise ValueError("oh no")

    @app.exception(ValueError)
    def myhandler(request, exception):
        1 / 0

    c = get_client(app)
    with c as client:
        _, response = client.get("/error")
        assert response.status == 500

    event1, event2 = events

    (exception,) = event1["exception"]["values"]
    assert exception["type"] == "ValueError"
    assert any(
        frame["filename"].endswith("test_sanic.py")
        for frame in exception["stacktrace"]["frames"]
    )

    exception = event2["exception"]["values"][-1]
    assert exception["type"] == "ZeroDivisionError"
    assert any(
        frame["filename"].endswith("test_sanic.py")
        for frame in exception["stacktrace"]["frames"]
    )


def test_concurrency(sentry_init, app):
    """
    Make sure we instrument Sanic in a way where request data does not leak
    between request handlers. This test also implicitly tests our concept of
    how async code should be instrumented, so if it breaks it likely has
    ramifications for other async integrations and async usercode.

    We directly call the request handler instead of using Sanic's test client
    because that's the only way we could reproduce leakage with such a low
    amount of concurrent tasks.
    """
    sentry_init(integrations=[SanicIntegration()])

    @app.route("/context-check/")
    async def context_check(request, i):
        scope = sentry_sdk.get_isolation_scope()
        scope.set_tag("i", i)

        await asyncio.sleep(random.random())

        scope = sentry_sdk.get_isolation_scope()
        assert scope._tags["i"] == i

        return response.text("ok")

    async def task(i):
        responses = []

        kwargs = {
            "url_bytes": "http://localhost/context-check/{i}".format(i=i).encode(
                "ascii"
            ),
            "headers": {},
            "version": "1.1",
            "method": "GET",
            "transport": None,
        }

        if SANIC_VERSION >= (19,):
            kwargs["app"] = app

        if SANIC_VERSION >= (21, 3):

            class MockAsyncStreamer:
                def __init__(self, request_body):
                    self.request_body = request_body
                    self.iter = iter(self.request_body)

                    if SANIC_VERSION >= (21, 12):
                        self.response = None
                        self.stage = Mock()
                    else:
                        self.response = b"success"

                def respond(self, response):
                    responses.append(response)
                    patched_response = HTTPResponse()
                    return patched_response

                def __aiter__(self):
                    return self

                async def __anext__(self):
                    try:
                        return next(self.iter)
                    except StopIteration:
                        raise StopAsyncIteration

            patched_request = request.Request(**kwargs)
            patched_request.stream = MockAsyncStreamer([b"hello", b"foo"])

            if SANIC_VERSION >= (21, 9):
                await app.dispatch(
                    "http.lifecycle.request",
                    context={"request": patched_request},
                    inline=True,
                )

            await app.handle_request(
                patched_request,
            )
        else:
            await app.handle_request(
                request.Request(**kwargs),
                write_callback=responses.append,
                stream_callback=responses.append,
            )

        (r,) = responses
        assert r.status == 200

    async def runner():
        if SANIC_VERSION >= (21, 3):
            if SANIC_VERSION >= (21, 9):
                await app._startup()
            else:
                try:
                    app.router.reset()
                    app.router.finalize()
                except AttributeError:
                    ...
        await asyncio.gather(*(task(i) for i in range(1000)))

    if sys.version_info < (3, 7):
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        loop.run_until_complete(runner())
    else:
        asyncio.run(runner())

    scope = sentry_sdk.get_isolation_scope()
    assert not scope._tags


class TransactionTestConfig:
    """
    Data class to store configurations for each performance transaction test run, including
    both the inputs and relevant expected results.
    """

    def __init__(
        self,
        integration_args,
        url,
        expected_status,
        expected_transaction_name,
        expected_source=None,
    ):
        # type: (Iterable[Optional[Container[int]]], str, int, Optional[str], Optional[str]) -> None
        """
        An expected_transaction_name of None indicates that we expect not to
        receive a transaction.
        """
        self.integration_args = integration_args
        self.url = url
        self.expected_status = expected_status
        self.expected_transaction_name = expected_transaction_name
        self.expected_source = expected_source


@pytest.mark.skipif(
    not PERFORMANCE_SUPPORTED, reason="Performance not supported on this Sanic version"
)
@pytest.mark.parametrize(
    "test_config",
    [
        TransactionTestConfig(
            # Transaction for successful page load
            integration_args=(),
            url="/message",
            expected_status=200,
            expected_transaction_name="hi",
            expected_source=TRANSACTION_SOURCE_COMPONENT,
        ),
        TransactionTestConfig(
            # Transaction still recorded when we have an internal server error
            integration_args=(),
            url="/500",
            expected_status=500,
            expected_transaction_name="fivehundred",
            expected_source=TRANSACTION_SOURCE_COMPONENT,
        ),
        TransactionTestConfig(
            # By default, no transaction when we have a 404 error
            integration_args=(),
            url="/404",
            expected_status=404,
            expected_transaction_name=None,
        ),
        TransactionTestConfig(
            # With no ignored HTTP statuses, we should get transactions for 404 errors
            integration_args=(None,),
            url="/404",
            expected_status=404,
            expected_transaction_name="/404",
            expected_source=TRANSACTION_SOURCE_URL,
        ),
        TransactionTestConfig(
            # Transaction can be suppressed for other HTTP statuses, too, by passing config to the integration
            integration_args=({200},),
            url="/message",
            expected_status=200,
            expected_transaction_name=None,
        ),
    ],
)
def test_transactions(test_config, sentry_init, app, capture_events):
    # type: (TransactionTestConfig, Any, Any, Any) -> None

    # Init the SanicIntegration with the desired arguments
    sentry_init(
        integrations=[SanicIntegration(*test_config.integration_args)],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    # Make request to the desired URL
    c = get_client(app)
    with c as client:
        _, response = client.get(test_config.url)
        assert response.status == test_config.expected_status

    # Extract the transaction events by inspecting the event types. We should at most have 1 transaction event.
    transaction_events = [
        e for e in events if "type" in e and e["type"] == "transaction"
    ]
    assert len(transaction_events) <= 1

    # Get the only transaction event, or set to None if there are no transaction events.
    (transaction_event, *_) = [*transaction_events, None]

    # We should have no transaction event if and only if we expect no transactions
    assert (transaction_event is None) == (
        test_config.expected_transaction_name is None
    )

    # If a transaction was expected, ensure it is correct
    assert (
        transaction_event is None
        or transaction_event["transaction"] == test_config.expected_transaction_name
    )
    assert (
        transaction_event is None
        or transaction_event["transaction_info"]["source"]
        == test_config.expected_source
    )


@pytest.mark.skipif(
    not PERFORMANCE_SUPPORTED, reason="Performance not supported on this Sanic version"
)
def test_span_origin(sentry_init, app, capture_events):
    sentry_init(integrations=[SanicIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    c = get_client(app)
    with c as client:
        client.get("/message?foo=bar")

    (_, event) = events

    assert event["contexts"]["trace"]["origin"] == "auto.http.sanic"
sentry-python-2.18.0/tests/integrations/serverless/000077500000000000000000000000001471214654000225165ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/serverless/test_serverless.py000066400000000000000000000017441471214654000263320ustar00rootroot00000000000000import pytest

from sentry_sdk.integrations.serverless import serverless_function


def test_basic(sentry_init, capture_exceptions, monkeypatch):
    sentry_init()
    exceptions = capture_exceptions()

    flush_calls = []

    @serverless_function
    def foo():
        monkeypatch.setattr("sentry_sdk.flush", lambda: flush_calls.append(1))
        1 / 0

    with pytest.raises(ZeroDivisionError):
        foo()

    (exception,) = exceptions
    assert isinstance(exception, ZeroDivisionError)

    assert flush_calls == [1]


def test_flush_disabled(sentry_init, capture_exceptions, monkeypatch):
    sentry_init()
    exceptions = capture_exceptions()

    flush_calls = []

    monkeypatch.setattr("sentry_sdk.flush", lambda: flush_calls.append(1))

    @serverless_function(flush=False)
    def foo():
        1 / 0

    with pytest.raises(ZeroDivisionError):
        foo()

    (exception,) = exceptions
    assert isinstance(exception, ZeroDivisionError)

    assert flush_calls == []
sentry-python-2.18.0/tests/integrations/socket/000077500000000000000000000000001471214654000216115ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/socket/__init__.py000066400000000000000000000000551471214654000237220ustar00rootroot00000000000000import pytest

pytest.importorskip("socket")
sentry-python-2.18.0/tests/integrations/socket/test_socket.py000066400000000000000000000043111471214654000245110ustar00rootroot00000000000000import socket

from sentry_sdk import start_transaction
from sentry_sdk.integrations.socket import SocketIntegration
from tests.conftest import ApproxDict


def test_getaddrinfo_trace(sentry_init, capture_events):
    sentry_init(integrations=[SocketIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    with start_transaction():
        socket.getaddrinfo("example.com", 443)

    (event,) = events
    (span,) = event["spans"]

    assert span["op"] == "socket.dns"
    assert span["description"] == "example.com:443"
    assert span["data"] == ApproxDict(
        {
            "host": "example.com",
            "port": 443,
        }
    )


def test_create_connection_trace(sentry_init, capture_events):
    timeout = 10

    sentry_init(integrations=[SocketIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    with start_transaction():
        socket.create_connection(("example.com", 443), timeout, None)

    (event,) = events
    (connect_span, dns_span) = event["spans"]
    # create_connection calls getaddrinfo internally, so the event should also
    # contain a DNS span

    assert connect_span["op"] == "socket.connection"
    assert connect_span["description"] == "example.com:443"
    assert connect_span["data"] == ApproxDict(
        {
            "address": ["example.com", 443],
            "timeout": timeout,
            "source_address": None,
        }
    )

    assert dns_span["op"] == "socket.dns"
    assert dns_span["description"] == "example.com:443"
    assert dns_span["data"] == ApproxDict(
        {
            "host": "example.com",
            "port": 443,
        }
    )


def test_span_origin(sentry_init, capture_events):
    sentry_init(
        integrations=[SocketIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    with start_transaction(name="foo"):
        socket.create_connection(("example.com", 443), 1, None)

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"

    assert event["spans"][0]["op"] == "socket.connection"
    assert event["spans"][0]["origin"] == "auto.socket.socket"

    assert event["spans"][1]["op"] == "socket.dns"
    assert event["spans"][1]["origin"] == "auto.socket.socket"
sentry-python-2.18.0/tests/integrations/spark/000077500000000000000000000000001471214654000214415ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/spark/__init__.py000066400000000000000000000001121471214654000235440ustar00rootroot00000000000000import pytest

pytest.importorskip("pyspark")
pytest.importorskip("py4j")
sentry-python-2.18.0/tests/integrations/spark/test_spark.py000066400000000000000000000157011471214654000241760ustar00rootroot00000000000000import pytest
import sys
from unittest.mock import patch
from sentry_sdk.integrations.spark.spark_driver import (
    _set_app_properties,
    _start_sentry_listener,
    SentryListener,
    SparkIntegration,
)
from sentry_sdk.integrations.spark.spark_worker import SparkWorkerIntegration

from pyspark import SparkContext

from py4j.protocol import Py4JJavaError

################
# DRIVER TESTS #
################


def test_set_app_properties():
    spark_context = SparkContext(appName="Testing123")
    _set_app_properties()

    assert spark_context.getLocalProperty("sentry_app_name") == "Testing123"
    # applicationId generated by sparkContext init
    assert (
        spark_context.getLocalProperty("sentry_application_id")
        == spark_context.applicationId
    )


def test_start_sentry_listener():
    spark_context = SparkContext.getOrCreate()

    gateway = spark_context._gateway
    assert gateway._callback_server is None

    _start_sentry_listener(spark_context)

    assert gateway._callback_server is not None


def test_initialize_spark_integration(sentry_init):
    sentry_init(integrations=[SparkIntegration()])
    SparkContext.getOrCreate()


@pytest.fixture
def sentry_listener():
    return SentryListener()


@pytest.fixture
def mock_add_breadcrumb():
    with patch("sentry_sdk.add_breadcrumb") as mock:
        yield mock


def test_sentry_listener_on_job_start(sentry_listener, mock_add_breadcrumb):
    listener = sentry_listener

    class MockJobStart:
        def jobId(self):  # noqa: N802
            return "sample-job-id-start"

    mock_job_start = MockJobStart()
    listener.onJobStart(mock_job_start)

    mock_add_breadcrumb.assert_called_once()
    call_args = mock_add_breadcrumb.call_args

    assert call_args.kwargs["level"] == "info"
    assert "sample-job-id-start" in call_args.kwargs["message"]


@pytest.mark.parametrize(
    "job_result, level", [("JobSucceeded", "info"), ("JobFailed", "warning")]
)
def test_sentry_listener_on_job_end(
    sentry_listener, mock_add_breadcrumb, job_result, level
):
    listener = sentry_listener

    class MockJobResult:
        def toString(self):  # noqa: N802
            return job_result

    class MockJobEnd:
        def jobId(self):  # noqa: N802
            return "sample-job-id-end"

        def jobResult(self):  # noqa: N802
            result = MockJobResult()
            return result

    mock_job_end = MockJobEnd()
    listener.onJobEnd(mock_job_end)

    mock_add_breadcrumb.assert_called_once()
    call_args = mock_add_breadcrumb.call_args

    assert call_args.kwargs["level"] == level
    assert call_args.kwargs["data"]["result"] == job_result
    assert "sample-job-id-end" in call_args.kwargs["message"]


def test_sentry_listener_on_stage_submitted(sentry_listener, mock_add_breadcrumb):
    listener = sentry_listener

    class StageInfo:
        def stageId(self):  # noqa: N802
            return "sample-stage-id-submit"

        def name(self):
            return "run-job"

        def attemptId(self):  # noqa: N802
            return 14

    class MockStageSubmitted:
        def stageInfo(self):  # noqa: N802
            stageinf = StageInfo()
            return stageinf

    mock_stage_submitted = MockStageSubmitted()
    listener.onStageSubmitted(mock_stage_submitted)

    mock_add_breadcrumb.assert_called_once()
    call_args = mock_add_breadcrumb.call_args

    assert call_args.kwargs["level"] == "info"
    assert "sample-stage-id-submit" in call_args.kwargs["message"]
    assert call_args.kwargs["data"]["attemptId"] == 14
    assert call_args.kwargs["data"]["name"] == "run-job"


@pytest.fixture
def get_mock_stage_completed():
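    # Builds a fake Java StageCompleted event. When failure_reason is False,
    # reading the reason raises Py4JJavaError, mirroring how py4j surfaces a
    # missing value from the JVM side.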
    def _inner(failure_reason):
        class JavaException:
            def __init__(self):
                self._target_id = "id"

        class FailureReason:
            def get(self):
                if failure_reason:
                    return "failure-reason"
                else:
                    raise Py4JJavaError("msg", JavaException())

        class StageInfo:
            def stageId(self):  # noqa: N802
                return "sample-stage-id-submit"

            def name(self):
                return "run-job"

            def attemptId(self):  # noqa: N802
                return 14

            def failureReason(self):  # noqa: N802
                return FailureReason()

        class MockStageCompleted:
            def stageInfo(self):  # noqa: N802
                return StageInfo()

        return MockStageCompleted()

    return _inner


def test_sentry_listener_on_stage_completed_success(
    sentry_listener, mock_add_breadcrumb, get_mock_stage_completed
):
    listener = sentry_listener

    mock_stage_completed = get_mock_stage_completed(failure_reason=False)
    listener.onStageCompleted(mock_stage_completed)

    mock_add_breadcrumb.assert_called_once()
    call_args = mock_add_breadcrumb.call_args

    assert call_args.kwargs["level"] == "info"
    assert "sample-stage-id-submit" in call_args.kwargs["message"]
    assert call_args.kwargs["data"]["attemptId"] == 14
    assert call_args.kwargs["data"]["name"] == "run-job"
    assert "reason" not in call_args.kwargs["data"]


def test_sentry_listener_on_stage_completed_failure(
    sentry_listener, mock_add_breadcrumb, get_mock_stage_completed
):
    listener = sentry_listener

    mock_stage_completed = get_mock_stage_completed(failure_reason=True)
    listener.onStageCompleted(mock_stage_completed)

    mock_add_breadcrumb.assert_called_once()
    mock_hub = mock_add_breadcrumb.call_args

    assert mock_hub.kwargs["level"] == "warning"
    assert "sample-stage-id-submit" in mock_hub.kwargs["message"]
    assert mock_hub.kwargs["data"]["attemptId"] == 14
    assert mock_hub.kwargs["data"]["name"] == "run-job"
    assert mock_hub.kwargs["data"]["reason"] == "failure-reason"


################
# WORKER TESTS #
################


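# This test monkeypatches `pyspark.worker.main` so the daemon runs a task that
# raises ZeroDivisionError and then calls `sys.exit(-1)`. The integration is
# expected to report the original ZeroDivisionError, while the SystemExit used
# to signal task failure is seen by `capture_exceptions` but kept out of the
# captured event.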
def test_spark_worker(monkeypatch, sentry_init, capture_events, capture_exceptions):
    import pyspark.worker as original_worker
    import pyspark.daemon as original_daemon

    from pyspark.taskcontext import TaskContext

    task_context = TaskContext._getOrCreate()

    def mock_main():
        task_context._stageId = 0
        task_context._attemptNumber = 1
        task_context._partitionId = 2
        task_context._taskAttemptId = 3

        try:
            raise ZeroDivisionError
        except ZeroDivisionError:
            sys.exit(-1)

    monkeypatch.setattr(original_worker, "main", mock_main)

    sentry_init(integrations=[SparkWorkerIntegration()])

    events = capture_events()
    exceptions = capture_exceptions()

    original_daemon.worker_main()

    # SystemExit is raised, but not recorded as part of the event
    assert type(exceptions.pop()) == SystemExit
    assert len(events[0]["exception"]["values"]) == 1
    assert events[0]["exception"]["values"][0]["type"] == "ZeroDivisionError"

    assert events[0]["tags"] == {
        "stageId": "0",
        "attemptNumber": "1",
        "partitionId": "2",
        "taskAttemptId": "3",
    }
sentry-python-2.18.0/tests/integrations/sqlalchemy/000077500000000000000000000000001471214654000224635ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/sqlalchemy/__init__.py000066400000000000000000000004441471214654000245760ustar00rootroot00000000000000import os
import sys
import pytest

pytest.importorskip("sqlalchemy")

# Add `sqlalchemy_helpers` to the module search path so that query source path
# names can be tested relative to the module. See
# `test_query_source_with_module_in_search_path`.
sys.path.insert(0, os.path.join(os.path.dirname(__file__)))
sentry-python-2.18.0/tests/integrations/sqlalchemy/sqlalchemy_helpers/000077500000000000000000000000001471214654000263475ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/sqlalchemy/sqlalchemy_helpers/__init__.py000066400000000000000000000000001471214654000304460ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/sqlalchemy/sqlalchemy_helpers/helpers.py000066400000000000000000000003001471214654000303540ustar00rootroot00000000000000def add_model_to_session(model, session):
    session.add(model)
    session.commit()


def query_first_model_from_session(model_klass, session):
    return session.query(model_klass).first()
sentry-python-2.18.0/tests/integrations/sqlalchemy/test_sqlalchemy.py000066400000000000000000000534541471214654000262510ustar00rootroot00000000000000import os
from datetime import datetime
from unittest import mock

import pytest
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker
from sqlalchemy import text

import sentry_sdk
from sentry_sdk import capture_message, start_transaction
from sentry_sdk.consts import DEFAULT_MAX_VALUE_LENGTH, SPANDATA
from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
from sentry_sdk.serializer import MAX_EVENT_BYTES
from sentry_sdk.tracing_utils import record_sql_queries
from sentry_sdk.utils import json_dumps


def test_orm_queries(sentry_init, capture_events):
    sentry_init(
        integrations=[SqlalchemyIntegration()], _experiments={"record_sql_params": True}
    )
    events = capture_events()

    Base = declarative_base()  # noqa: N806

    class Person(Base):
        __tablename__ = "person"
        id = Column(Integer, primary_key=True)
        name = Column(String(250), nullable=False)

    class Address(Base):
        __tablename__ = "address"
        id = Column(Integer, primary_key=True)
        street_name = Column(String(250))
        street_number = Column(String(250))
        post_code = Column(String(250), nullable=False)
        person_id = Column(Integer, ForeignKey("person.id"))
        person = relationship(Person)

    engine = create_engine(
        "sqlite:///:memory:", connect_args={"check_same_thread": False}
    )
    Base.metadata.create_all(engine)

    Session = sessionmaker(bind=engine)  # noqa: N806
    session = Session()

    bob = Person(name="Bob")
    session.add(bob)

    assert session.query(Person).first() == bob

    capture_message("hi")

    (event,) = events

    for crumb in event["breadcrumbs"]["values"]:
        del crumb["timestamp"]

    assert event["breadcrumbs"]["values"][-2:] == [
        {
            "category": "query",
            "data": {"db.params": ["Bob"], "db.paramstyle": "qmark"},
            "message": "INSERT INTO person (name) VALUES (?)",
            "type": "default",
        },
        {
            "category": "query",
            "data": {"db.params": [1, 0], "db.paramstyle": "qmark"},
            "message": "SELECT person.id AS person_id, person.name AS person_name \n"
            "FROM person\n"
            " LIMIT ? OFFSET ?",
            "type": "default",
        },
    ]


def test_transactions(sentry_init, capture_events, render_span_tree):
    sentry_init(
        integrations=[SqlalchemyIntegration()],
        _experiments={"record_sql_params": True},
        traces_sample_rate=1.0,
    )
    events = capture_events()

    Base = declarative_base()  # noqa: N806

    class Person(Base):
        __tablename__ = "person"
        id = Column(Integer, primary_key=True)
        name = Column(String(250), nullable=False)

    class Address(Base):
        __tablename__ = "address"
        id = Column(Integer, primary_key=True)
        street_name = Column(String(250))
        street_number = Column(String(250))
        post_code = Column(String(250), nullable=False)
        person_id = Column(Integer, ForeignKey("person.id"))
        person = relationship(Person)

    engine = create_engine(
        "sqlite:///:memory:", connect_args={"check_same_thread": False}
    )
    Base.metadata.create_all(engine)

    Session = sessionmaker(bind=engine)  # noqa: N806
    session = Session()

    with start_transaction(name="test_transaction", sampled=True):
        with session.begin_nested():
            session.query(Person).first()

        for _ in range(2):
            with pytest.raises(IntegrityError):
                with session.begin_nested():
                    session.add(Person(id=1, name="bob"))
                    session.add(Person(id=1, name="bob"))

        with session.begin_nested():
            session.query(Person).first()

    (event,) = events

    for span in event["spans"]:
        assert span["data"][SPANDATA.DB_SYSTEM] == "sqlite"
        assert span["data"][SPANDATA.DB_NAME] == ":memory:"
        assert SPANDATA.SERVER_ADDRESS not in span["data"]
        assert SPANDATA.SERVER_PORT not in span["data"]

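    # Each `begin_nested()` block is bracketed by SAVEPOINT spans: RELEASE on
    # success, ROLLBACK TO on the IntegrityError raised by the duplicate insert.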
    assert (
        render_span_tree(event)
        == """\
- op=null: description=null
  - op="db": description="SAVEPOINT sa_savepoint_1"
  - op="db": description="SELECT person.id AS person_id, person.name AS person_name \\nFROM person\\n LIMIT ? OFFSET ?"
  - op="db": description="RELEASE SAVEPOINT sa_savepoint_1"
  - op="db": description="SAVEPOINT sa_savepoint_2"
  - op="db": description="INSERT INTO person (id, name) VALUES (?, ?)"
  - op="db": description="ROLLBACK TO SAVEPOINT sa_savepoint_2"
  - op="db": description="SAVEPOINT sa_savepoint_3"
  - op="db": description="INSERT INTO person (id, name) VALUES (?, ?)"
  - op="db": description="ROLLBACK TO SAVEPOINT sa_savepoint_3"
  - op="db": description="SAVEPOINT sa_savepoint_4"
  - op="db": description="SELECT person.id AS person_id, person.name AS person_name \\nFROM person\\n LIMIT ? OFFSET ?"
  - op="db": description="RELEASE SAVEPOINT sa_savepoint_4"\
"""
    )


def test_transactions_no_engine_url(sentry_init, capture_events):
    sentry_init(
        integrations=[SqlalchemyIntegration()],
        _experiments={"record_sql_params": True},
        traces_sample_rate=1.0,
    )
    events = capture_events()

    Base = declarative_base()  # noqa: N806

    class Person(Base):
        __tablename__ = "person"
        id = Column(Integer, primary_key=True)
        name = Column(String(250), nullable=False)

    class Address(Base):
        __tablename__ = "address"
        id = Column(Integer, primary_key=True)
        street_name = Column(String(250))
        street_number = Column(String(250))
        post_code = Column(String(250), nullable=False)
        person_id = Column(Integer, ForeignKey("person.id"))
        person = relationship(Person)

    engine = create_engine(
        "sqlite:///:memory:", connect_args={"check_same_thread": False}
    )
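    # Simulate an engine without a URL; the spans below should then omit
    # DB_NAME as well as the server address and port.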
    engine.url = None
    Base.metadata.create_all(engine)

    Session = sessionmaker(bind=engine)  # noqa: N806
    session = Session()

    with start_transaction(name="test_transaction", sampled=True):
        with session.begin_nested():
            session.query(Person).first()

        for _ in range(2):
            with pytest.raises(IntegrityError):
                with session.begin_nested():
                    session.add(Person(id=1, name="bob"))
                    session.add(Person(id=1, name="bob"))

        with session.begin_nested():
            session.query(Person).first()

    (event,) = events

    for span in event["spans"]:
        assert span["data"][SPANDATA.DB_SYSTEM] == "sqlite"
        assert SPANDATA.DB_NAME not in span["data"]
        assert SPANDATA.SERVER_ADDRESS not in span["data"]
        assert SPANDATA.SERVER_PORT not in span["data"]


def test_long_sql_query_preserved(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1,
        integrations=[SqlalchemyIntegration()],
    )
    events = capture_events()

    engine = create_engine(
        "sqlite:///:memory:", connect_args={"check_same_thread": False}
    )
    with start_transaction(name="test"):
        with engine.connect() as con:
            con.execute(text(" UNION ".join("SELECT {}".format(i) for i in range(100))))

    (event,) = events
    description = event["spans"][0]["description"]
    assert description.startswith("SELECT 0 UNION SELECT 1")
    assert description.endswith("SELECT 98 UNION SELECT 99")


def test_large_event_not_truncated(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1,
        integrations=[SqlalchemyIntegration()],
    )
    events = capture_events()

    long_str = "x" * (DEFAULT_MAX_VALUE_LENGTH + 10)

    scope = sentry_sdk.get_isolation_scope()

    @scope.add_event_processor
    def processor(event, hint):
        event["message"] = long_str
        return event

    engine = create_engine(
        "sqlite:///:memory:", connect_args={"check_same_thread": False}
    )
    with start_transaction(name="test"):
        with engine.connect() as con:
            for _ in range(1500):
                con.execute(
                    text(" UNION ".join("SELECT {}".format(i) for i in range(100)))
                )

    (event,) = events

    assert len(json_dumps(event)) > MAX_EVENT_BYTES

    # Some spans are discarded.
    assert len(event["spans"]) == 1000

    # Span descriptions are not truncated.
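    # (1583 is the exact length of the 100-way UNION query built above:
    # 10 one-digit SELECTs of 8 chars, 90 two-digit SELECTs of 9 chars,
    # and 99 " UNION " separators of 7 chars: 80 + 810 + 693 = 1583.)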
    description = event["spans"][0]["description"]
    assert len(description) == 1583
    assert description.startswith("SELECT 0")
    assert description.endswith("SELECT 98 UNION SELECT 99")

    description = event["spans"][999]["description"]
    assert len(description) == 1583
    assert description.startswith("SELECT 0")
    assert description.endswith("SELECT 98 UNION SELECT 99")

    # Smoke check that truncation of other fields has not changed.
    assert len(event["message"]) == DEFAULT_MAX_VALUE_LENGTH

    # The _meta for other truncated fields should be there as well.
    assert event["_meta"]["message"] == {
        "": {"len": 1034, "rem": [["!limit", "x", 1021, 1024]]}
    }


def test_engine_name_not_string(sentry_init):
    sentry_init(
        integrations=[SqlalchemyIntegration()],
    )

    engine = create_engine(
        "sqlite:///:memory:", connect_args={"check_same_thread": False}
    )
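    # A non-string dialect name (bytes here) must not crash the integration;
    # this is a smoke test with no assertions.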
    engine.dialect.name = b"sqlite"

    with engine.connect() as con:
        con.execute(text("SELECT 0"))


def test_query_source_disabled(sentry_init, capture_events):
    sentry_options = {
        "integrations": [SqlalchemyIntegration()],
        "enable_tracing": True,
        "enable_db_query_source": False,
        "db_query_source_threshold_ms": 0,
    }

    sentry_init(**sentry_options)

    events = capture_events()

    with start_transaction(name="test_transaction", sampled=True):
        Base = declarative_base()  # noqa: N806

        class Person(Base):
            __tablename__ = "person"
            id = Column(Integer, primary_key=True)
            name = Column(String(250), nullable=False)

        engine = create_engine(
            "sqlite:///:memory:", connect_args={"check_same_thread": False}
        )
        Base.metadata.create_all(engine)

        Session = sessionmaker(bind=engine)  # noqa: N806
        session = Session()

        bob = Person(name="Bob")
        session.add(bob)

        assert session.query(Person).first() == bob

    (event,) = events

    for span in event["spans"]:
        if span.get("op") == "db" and span.get("description").startswith(
            "SELECT person"
        ):
            data = span.get("data", {})

            assert SPANDATA.CODE_LINENO not in data
            assert SPANDATA.CODE_NAMESPACE not in data
            assert SPANDATA.CODE_FILEPATH not in data
            assert SPANDATA.CODE_FUNCTION not in data
            break
    else:
        raise AssertionError("No db span found")


@pytest.mark.parametrize("enable_db_query_source", [None, True])
def test_query_source_enabled(sentry_init, capture_events, enable_db_query_source):
    sentry_options = {
        "integrations": [SqlalchemyIntegration()],
        "enable_tracing": True,
        "db_query_source_threshold_ms": 0,
    }
    if enable_db_query_source is not None:
        sentry_options["enable_db_query_source"] = enable_db_query_source

    sentry_init(**sentry_options)

    events = capture_events()

    with start_transaction(name="test_transaction", sampled=True):
        Base = declarative_base()  # noqa: N806

        class Person(Base):
            __tablename__ = "person"
            id = Column(Integer, primary_key=True)
            name = Column(String(250), nullable=False)

        engine = create_engine(
            "sqlite:///:memory:", connect_args={"check_same_thread": False}
        )
        Base.metadata.create_all(engine)

        Session = sessionmaker(bind=engine)  # noqa: N806
        session = Session()

        bob = Person(name="Bob")
        session.add(bob)

        assert session.query(Person).first() == bob

    (event,) = events

    for span in event["spans"]:
        if span.get("op") == "db" and span.get("description").startswith(
            "SELECT person"
        ):
            data = span.get("data", {})

            assert SPANDATA.CODE_LINENO in data
            assert SPANDATA.CODE_NAMESPACE in data
            assert SPANDATA.CODE_FILEPATH in data
            assert SPANDATA.CODE_FUNCTION in data
            break
    else:
        raise AssertionError("No db span found")


def test_query_source(sentry_init, capture_events):
    sentry_init(
        integrations=[SqlalchemyIntegration()],
        enable_tracing=True,
        enable_db_query_source=True,
        db_query_source_threshold_ms=0,
    )
    events = capture_events()

    with start_transaction(name="test_transaction", sampled=True):
        Base = declarative_base()  # noqa: N806

        class Person(Base):
            __tablename__ = "person"
            id = Column(Integer, primary_key=True)
            name = Column(String(250), nullable=False)

        engine = create_engine(
            "sqlite:///:memory:", connect_args={"check_same_thread": False}
        )
        Base.metadata.create_all(engine)

        Session = sessionmaker(bind=engine)  # noqa: N806
        session = Session()

        bob = Person(name="Bob")
        session.add(bob)

        assert session.query(Person).first() == bob

    (event,) = events

    for span in event["spans"]:
        if span.get("op") == "db" and span.get("description").startswith(
            "SELECT person"
        ):
            data = span.get("data", {})

            assert SPANDATA.CODE_LINENO in data
            assert SPANDATA.CODE_NAMESPACE in data
            assert SPANDATA.CODE_FILEPATH in data
            assert SPANDATA.CODE_FUNCTION in data

            assert type(data.get(SPANDATA.CODE_LINENO)) == int
            assert data.get(SPANDATA.CODE_LINENO) > 0
            assert (
                data.get(SPANDATA.CODE_NAMESPACE)
                == "tests.integrations.sqlalchemy.test_sqlalchemy"
            )
            assert data.get(SPANDATA.CODE_FILEPATH).endswith(
                "tests/integrations/sqlalchemy/test_sqlalchemy.py"
            )

            is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
            assert is_relative_path

            assert data.get(SPANDATA.CODE_FUNCTION) == "test_query_source"
            break
    else:
        raise AssertionError("No db span found")


def test_query_source_with_module_in_search_path(sentry_init, capture_events):
    """
    Test that the query source is reported relative to the path of the module it ran in.
    """
    sentry_init(
        integrations=[SqlalchemyIntegration()],
        enable_tracing=True,
        enable_db_query_source=True,
        db_query_source_threshold_ms=0,
    )
    events = capture_events()

    from sqlalchemy_helpers.helpers import (
        add_model_to_session,
        query_first_model_from_session,
    )

    with start_transaction(name="test_transaction", sampled=True):
        Base = declarative_base()  # noqa: N806

        class Person(Base):
            __tablename__ = "person"
            id = Column(Integer, primary_key=True)
            name = Column(String(250), nullable=False)

        engine = create_engine(
            "sqlite:///:memory:", connect_args={"check_same_thread": False}
        )
        Base.metadata.create_all(engine)

        Session = sessionmaker(bind=engine)  # noqa: N806
        session = Session()

        bob = Person(name="Bob")

        add_model_to_session(bob, session)

        assert query_first_model_from_session(Person, session) == bob

    (event,) = events

    for span in event["spans"]:
        if span.get("op") == "db" and span.get("description").startswith(
            "SELECT person"
        ):
            data = span.get("data", {})

            assert SPANDATA.CODE_LINENO in data
            assert SPANDATA.CODE_NAMESPACE in data
            assert SPANDATA.CODE_FILEPATH in data
            assert SPANDATA.CODE_FUNCTION in data

            assert type(data.get(SPANDATA.CODE_LINENO)) == int
            assert data.get(SPANDATA.CODE_LINENO) > 0
            assert data.get(SPANDATA.CODE_NAMESPACE) == "sqlalchemy_helpers.helpers"
            assert data.get(SPANDATA.CODE_FILEPATH) == "sqlalchemy_helpers/helpers.py"

            is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
            assert is_relative_path

            assert data.get(SPANDATA.CODE_FUNCTION) == "query_first_model_from_session"
            break
    else:
        raise AssertionError("No db span found")


def test_no_query_source_if_duration_too_short(sentry_init, capture_events):
    sentry_init(
        integrations=[SqlalchemyIntegration()],
        enable_tracing=True,
        enable_db_query_source=True,
        db_query_source_threshold_ms=100,
    )
    events = capture_events()

    with start_transaction(name="test_transaction", sampled=True):
        Base = declarative_base()  # noqa: N806

        class Person(Base):
            __tablename__ = "person"
            id = Column(Integer, primary_key=True)
            name = Column(String(250), nullable=False)

        engine = create_engine(
            "sqlite:///:memory:", connect_args={"check_same_thread": False}
        )
        Base.metadata.create_all(engine)

        Session = sessionmaker(bind=engine)  # noqa: N806
        session = Session()

        bob = Person(name="Bob")
        session.add(bob)

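        # Wrap `record_sql_queries` and pin the span duration to 99.999 ms,
        # just under the 100 ms `db_query_source_threshold_ms` configured above.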
        class fake_record_sql_queries:  # noqa: N801
            def __init__(self, *args, **kwargs):
                with record_sql_queries(*args, **kwargs) as span:
                    self.span = span

                self.span.start_timestamp = datetime(2024, 1, 1, microsecond=0)
                self.span.timestamp = datetime(2024, 1, 1, microsecond=99999)

            def __enter__(self):
                return self.span

            def __exit__(self, type, value, traceback):
                pass

        with mock.patch(
            "sentry_sdk.integrations.sqlalchemy.record_sql_queries",
            fake_record_sql_queries,
        ):
            assert session.query(Person).first() == bob

    (event,) = events

    for span in event["spans"]:
        if span.get("op") == "db" and span.get("description").startswith(
            "SELECT person"
        ):
            data = span.get("data", {})

            assert SPANDATA.CODE_LINENO not in data
            assert SPANDATA.CODE_NAMESPACE not in data
            assert SPANDATA.CODE_FILEPATH not in data
            assert SPANDATA.CODE_FUNCTION not in data

            break
    else:
        raise AssertionError("No db span found")


def test_query_source_if_duration_over_threshold(sentry_init, capture_events):
    sentry_init(
        integrations=[SqlalchemyIntegration()],
        enable_tracing=True,
        enable_db_query_source=True,
        db_query_source_threshold_ms=100,
    )
    events = capture_events()

    with start_transaction(name="test_transaction", sampled=True):
        Base = declarative_base()  # noqa: N806

        class Person(Base):
            __tablename__ = "person"
            id = Column(Integer, primary_key=True)
            name = Column(String(250), nullable=False)

        engine = create_engine(
            "sqlite:///:memory:", connect_args={"check_same_thread": False}
        )
        Base.metadata.create_all(engine)

        Session = sessionmaker(bind=engine)  # noqa: N806
        session = Session()

        bob = Person(name="Bob")
        session.add(bob)

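        # Wrap `record_sql_queries` and pin the span duration to 101 ms,
        # just over the 100 ms `db_query_source_threshold_ms` configured above.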
        class fake_record_sql_queries:  # noqa: N801
            def __init__(self, *args, **kwargs):
                with record_sql_queries(*args, **kwargs) as span:
                    self.span = span

                self.span.start_timestamp = datetime(2024, 1, 1, microsecond=0)
                self.span.timestamp = datetime(2024, 1, 1, microsecond=101000)

            def __enter__(self):
                return self.span

            def __exit__(self, type, value, traceback):
                pass

        with mock.patch(
            "sentry_sdk.integrations.sqlalchemy.record_sql_queries",
            fake_record_sql_queries,
        ):
            assert session.query(Person).first() == bob

    (event,) = events

    for span in event["spans"]:
        if span.get("op") == "db" and span.get("description").startswith(
            "SELECT person"
        ):
            data = span.get("data", {})

            assert SPANDATA.CODE_LINENO in data
            assert SPANDATA.CODE_NAMESPACE in data
            assert SPANDATA.CODE_FILEPATH in data
            assert SPANDATA.CODE_FUNCTION in data

            assert type(data.get(SPANDATA.CODE_LINENO)) == int
            assert data.get(SPANDATA.CODE_LINENO) > 0
            assert (
                data.get(SPANDATA.CODE_NAMESPACE)
                == "tests.integrations.sqlalchemy.test_sqlalchemy"
            )
            assert data.get(SPANDATA.CODE_FILEPATH).endswith(
                "tests/integrations/sqlalchemy/test_sqlalchemy.py"
            )

            is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
            assert is_relative_path

            assert (
                data.get(SPANDATA.CODE_FUNCTION)
                == "test_query_source_if_duration_over_threshold"
            )
            break
    else:
        raise AssertionError("No db span found")


def test_span_origin(sentry_init, capture_events):
    sentry_init(
        integrations=[SqlalchemyIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    engine = create_engine(
        "sqlite:///:memory:", connect_args={"check_same_thread": False}
    )
    with start_transaction(name="foo"):
        with engine.connect() as con:
            con.execute(text("SELECT 0"))

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"
    assert event["spans"][0]["origin"] == "auto.db.sqlalchemy"
sentry-python-2.18.0/tests/integrations/starlette/000077500000000000000000000000001471214654000223305ustar00rootroot00000000000000sentry-python-2.18.0/tests/integrations/starlette/__init__.py000066400000000000000000000000601471214654000244350ustar00rootroot00000000000000import pytest

pytest.importorskip("starlette")
sentry-python-2.18.0/tests/integrations/starlette/photo.jpg000066400000000000000000000510261471214654000241670ustar00rootroot00000000000000[binary JPEG image data omitted]sentry-python-2.18.0/tests/integrations/starlette/templates/000077500000000000000000000000001471214654000243265ustar00rootroot00000000000000
sentry-python-2.18.0/tests/integrations/starlette/test_starlette.py000066400000000000000000001221671471214654000257610ustar00rootroot00000000000000import asyncio
import base64
import functools
import json
import logging
import os
import re
import threading
import warnings
from unittest import mock

import pytest

from sentry_sdk import capture_message, get_baggage, get_traceparent
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
from sentry_sdk.integrations.starlette import (
    StarletteIntegration,
    StarletteRequestExtractor,
)
from sentry_sdk.utils import parse_version

import starlette
from starlette.authentication import (
    AuthCredentials,
    AuthenticationBackend,
    AuthenticationError,
    SimpleUser,
)
from starlette.exceptions import HTTPException
from starlette.middleware import Middleware
from starlette.middleware.authentication import AuthenticationMiddleware
from starlette.middleware.trustedhost import TrustedHostMiddleware
from starlette.testclient import TestClient


STARLETTE_VERSION = parse_version(starlette.__version__)

PICTURE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "photo.jpg")

BODY_JSON = {"some": "json", "for": "testing", "nested": {"numbers": 123}}

BODY_FORM = """--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="username"\r\n\r\nJane\r\n--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="password"\r\n\r\nhello123\r\n--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="photo"; filename="photo.jpg"\r\nContent-Type: image/jpg\r\nContent-Transfer-Encoding: base64\r\n\r\n{{image_data}}\r\n--fd721ef49ea403a6--\r\n""".replace(
    "{{image_data}}", str(base64.b64encode(open(PICTURE, "rb").read()))
)

FORM_RECEIVE_MESSAGES = [
    {"type": "http.request", "body": BODY_FORM.encode("utf-8")},
    {"type": "http.disconnect"},
]

JSON_RECEIVE_MESSAGES = [
    {"type": "http.request", "body": json.dumps(BODY_JSON).encode("utf-8")},
    {"type": "http.disconnect"},
]

PARSED_FORM = starlette.datastructures.FormData(
    [
        ("username", "Jane"),
        ("password", "hello123"),
        (
            "photo",
            starlette.datastructures.UploadFile(
                filename="photo.jpg",
                file=open(PICTURE, "rb"),
            ),
        ),
    ]
)

# Dummy ASGI scope for creating mock Starlette requests
SCOPE = {
    "client": ("172.29.0.10", 34784),
    "headers": [
        [b"host", b"example.com"],
        [b"user-agent", b"Mozilla/5.0 Gecko/20100101 Firefox/60.0"],
        [b"content-type", b"application/json"],
        [b"accept-language", b"en-US,en;q=0.5"],
        [b"accept-encoding", b"gzip, deflate, br"],
        [b"upgrade-insecure-requests", b"1"],
        [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
    ],
    "http_version": "0.0",
    "method": "GET",
    "path": "/path",
    "query_string": b"qs=hello",
    "scheme": "http",
    "server": ("172.28.0.10", 8000),
    "type": "http",
}


async def _mock_receive(msg):
    return msg


from starlette.templating import Jinja2Templates


def starlette_app_factory(middleware=None, debug=True):
    template_dir = os.path.join(
        os.getcwd(), "tests", "integrations", "starlette", "templates"
    )
    templates = Jinja2Templates(directory=template_dir)

    async def _homepage(request):
        1 / 0
        return starlette.responses.JSONResponse({"status": "ok"})

    async def _custom_error(request):
        raise Exception("Too Hot")

    async def _message(request):
        capture_message("hi")
        return starlette.responses.JSONResponse({"status": "ok"})

    async def _nomessage(request):
        return starlette.responses.JSONResponse({"status": "ok"})

    async def _message_with_id(request):
        capture_message("hi")
        return starlette.responses.JSONResponse({"status": "ok"})

    def _thread_ids_sync(request):
        return starlette.responses.JSONResponse(
            {
                "main": threading.main_thread().ident,
                "active": threading.current_thread().ident,
            }
        )

    async def _thread_ids_async(request):
        return starlette.responses.JSONResponse(
            {
                "main": threading.main_thread().ident,
                "active": threading.current_thread().ident,
            }
        )

    async def _render_template(request):
        capture_message(get_traceparent() + "\n" + get_baggage())

        template_context = {
            "request": request,
            "msg": "Hello Template World!",
        }
        return templates.TemplateResponse("trace_meta.html", template_context)

    all_methods = [
        "CONNECT",
        "DELETE",
        "GET",
        "HEAD",
        "OPTIONS",
        "PATCH",
        "POST",
        "PUT",
        "TRACE",
    ]

    app = starlette.applications.Starlette(
        debug=debug,
        routes=[
            starlette.routing.Route("/some_url", _homepage),
            starlette.routing.Route("/custom_error", _custom_error),
            starlette.routing.Route("/message", _message),
            starlette.routing.Route("/nomessage", _nomessage, methods=all_methods),
            starlette.routing.Route("/message/{message_id}", _message_with_id),
            starlette.routing.Route("/sync/thread_ids", _thread_ids_sync),
            starlette.routing.Route("/async/thread_ids", _thread_ids_async),
            starlette.routing.Route("/render_template", _render_template),
        ],
        middleware=middleware,
    )

    return app


def async_return(result):
    f = asyncio.Future()
    f.set_result(result)
    return f


class BasicAuthBackend(AuthenticationBackend):
    async def authenticate(self, conn):
        if "Authorization" not in conn.headers:
            return

        auth = conn.headers["Authorization"]
        try:
            scheme, credentials = auth.split()
            if scheme.lower() != "basic":
                return
            decoded = base64.b64decode(credentials).decode("ascii")
        except (ValueError, UnicodeDecodeError):
            raise AuthenticationError("Invalid basic auth credentials")

        username, _, password = decoded.partition(":")

        # TODO: You'd want to verify the username and password here.

        return AuthCredentials(["authenticated"]), SimpleUser(username)


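# Helper that asynchronously yields a string's bytes one at a time, e.g. to
# simulate a streamed request body.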
class AsyncIterator:
    def __init__(self, data):
        self.iter = iter(bytes(data, "utf-8"))

    def __aiter__(self):
        return self

    async def __anext__(self):
        try:
            return bytes([next(self.iter)])
        except StopIteration:
            raise StopAsyncIteration


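# Pure ASGI middleware that passes non-HTTP scopes straight through and wraps
# `send` with a no-op callback; `test_middleware_callback_spans` asserts the
# spans the integration creates around it.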
class SampleMiddleware:
    def __init__(self, app):
        self.app = app

    async def __call__(self, scope, receive, send):
        # only handle http requests
        if scope["type"] != "http":
            await self.app(scope, receive, send)
            return

        async def do_stuff(message):
            if message["type"] == "http.response.start":
                # do something here.
                pass

            await send(message)

        await self.app(scope, receive, do_stuff)


class SampleReceiveSendMiddleware:
    def __init__(self, app):
        self.app = app

    async def __call__(self, scope, receive, send):
        message = await receive()
        assert message
        assert message["type"] == "http.request"

        send_output = await send({"type": "something-unimportant"})
        assert send_output is None

        await self.app(scope, receive, send)


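# Like SampleReceiveSendMiddleware, but forwards `functools.partial`-wrapped
# receive/send callables to the app; `test_middleware_partial_receive_send`
# asserts that the integration still produces readable span descriptions for
# the partials.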
class SamplePartialReceiveSendMiddleware:
    def __init__(self, app):
        self.app = app

    async def __call__(self, scope, receive, send):
        message = await receive()
        assert message
        assert message["type"] == "http.request"

        send_output = await send({"type": "something-unimportant"})
        assert send_output is None

        async def my_receive(*args, **kwargs):
            pass

        async def my_send(*args, **kwargs):
            pass

        partial_receive = functools.partial(my_receive)
        partial_send = functools.partial(my_send)

        await self.app(scope, partial_receive, partial_send)


@pytest.mark.asyncio
async def test_starletterequestextractor_content_length(sentry_init):
    scope = SCOPE.copy()
    scope["headers"] = [
        [b"content-length", str(len(json.dumps(BODY_JSON))).encode()],
    ]
    starlette_request = starlette.requests.Request(scope)
    extractor = StarletteRequestExtractor(starlette_request)

    assert await extractor.content_length() == len(json.dumps(BODY_JSON))


@pytest.mark.asyncio
async def test_starletterequestextractor_cookies(sentry_init):
    starlette_request = starlette.requests.Request(SCOPE)
    extractor = StarletteRequestExtractor(starlette_request)

    assert extractor.cookies() == {
        "tasty_cookie": "strawberry",
        "yummy_cookie": "choco",
    }


@pytest.mark.asyncio
async def test_starletterequestextractor_json(sentry_init):
    starlette_request = starlette.requests.Request(SCOPE)

    # Mocking async `_receive()` that works in Python 3.7+
    side_effect = [_mock_receive(msg) for msg in JSON_RECEIVE_MESSAGES]
    starlette_request._receive = mock.Mock(side_effect=side_effect)

    extractor = StarletteRequestExtractor(starlette_request)

    assert extractor.is_json()
    assert await extractor.json() == BODY_JSON


@pytest.mark.asyncio
async def test_starletterequestextractor_form(sentry_init):
    scope = SCOPE.copy()
    scope["headers"] = [
        [b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"],
    ]
    # TODO add test for content-type: "application/x-www-form-urlencoded"

    starlette_request = starlette.requests.Request(scope)

    # Mocking async `_receive()` that works in Python 3.7+
    side_effect = [_mock_receive(msg) for msg in FORM_RECEIVE_MESSAGES]
    starlette_request._receive = mock.Mock(side_effect=side_effect)

    extractor = StarletteRequestExtractor(starlette_request)

    form_data = await extractor.form()
    assert form_data.keys() == PARSED_FORM.keys()
    assert form_data["username"] == PARSED_FORM["username"]
    assert form_data["password"] == PARSED_FORM["password"]
    assert form_data["photo"].filename == PARSED_FORM["photo"].filename

    # Make sure we can still read the body
    # after already reading it with extractor.form() above.
    body = await extractor.request.body()
    assert body


@pytest.mark.asyncio
async def test_starletterequestextractor_body_consumed_twice(
    sentry_init, capture_events
):
    """
    Starlette does cache when you read the request data via `request.json()`
    or `request.body()`, but it does NOT when using `request.form()`.
    So we have an edge case when the Sentry Starlette reads the body using `.form()`
    and the user wants to read the body using `.body()`.
    Because the underlying stream can not be consumed twice and is not cached.

    We have fixed this in `StarletteRequestExtractor.form()` by consuming the body
    first with `.body()` (to put it into the `_body` cache and then consume it with `.form()`.

    If this behavior is changed in Starlette and the `request.form()` in Starlette
    is also caching the body, this test will fail.

    See also https://github.com/encode/starlette/discussions/1933
    """
    scope = SCOPE.copy()
    scope["headers"] = [
        [b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"],
    ]

    starlette_request = starlette.requests.Request(scope)

    # Mocking async `_receive()` that works in Python 3.7+
    side_effect = [_mock_receive(msg) for msg in FORM_RECEIVE_MESSAGES]
    starlette_request._receive = mock.Mock(side_effect=side_effect)

    extractor = StarletteRequestExtractor(starlette_request)

    await extractor.request.form()

    with pytest.raises(RuntimeError):
        await extractor.request.body()
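
# A minimal sketch of the workaround described in the docstring above (assuming
# Starlette keeps caching `.body()` but not `.form()`): reading the body first
# caches the raw bytes, so a subsequent `.form()` call can parse them without
# consuming the stream a second time.
#
#     body = await request.body()  # caches the payload in `request._body`
#     form = await request.form()  # parses from the cached bytes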


@pytest.mark.asyncio
async def test_starletterequestextractor_extract_request_info_too_big(sentry_init):
    sentry_init(
        send_default_pii=True,
        integrations=[StarletteIntegration()],
    )
    scope = SCOPE.copy()
    scope["headers"] = [
        [b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"],
        [b"content-length", str(len(BODY_FORM)).encode()],
        [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
    ]
    starlette_request = starlette.requests.Request(scope)

    # Mocking async `_receive()` that works in Python 3.7+
    side_effect = [_mock_receive(msg) for msg in FORM_RECEIVE_MESSAGES]
    starlette_request._receive = mock.Mock(side_effect=side_effect)

    extractor = StarletteRequestExtractor(starlette_request)

    request_info = await extractor.extract_request_info()

    assert request_info
    assert request_info["cookies"] == {
        "tasty_cookie": "strawberry",
        "yummy_cookie": "choco",
    }
    # Because request is too big only the AnnotatedValue is extracted.
    assert request_info["data"].metadata == {"rem": [["!config", "x"]]}


@pytest.mark.asyncio
async def test_starletterequestextractor_extract_request_info(sentry_init):
    sentry_init(
        send_default_pii=True,
        integrations=[StarletteIntegration()],
    )
    scope = SCOPE.copy()
    scope["headers"] = [
        [b"content-type", b"application/json"],
        [b"content-length", str(len(json.dumps(BODY_JSON))).encode()],
        [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
    ]

    starlette_request = starlette.requests.Request(scope)

    # Mocking async `_receive()` that works in Python 3.7+
    side_effect = [_mock_receive(msg) for msg in JSON_RECEIVE_MESSAGES]
    starlette_request._receive = mock.Mock(side_effect=side_effect)

    extractor = StarletteRequestExtractor(starlette_request)

    request_info = await extractor.extract_request_info()

    assert request_info
    assert request_info["cookies"] == {
        "tasty_cookie": "strawberry",
        "yummy_cookie": "choco",
    }
    assert request_info["data"] == BODY_JSON


@pytest.mark.asyncio
async def test_starletterequestextractor_extract_request_info_no_pii(sentry_init):
    sentry_init(
        send_default_pii=False,
        integrations=[StarletteIntegration()],
    )
    scope = SCOPE.copy()
    scope["headers"] = [
        [b"content-type", b"application/json"],
        [b"content-length", str(len(json.dumps(BODY_JSON))).encode()],
        [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
    ]

    starlette_request = starlette.requests.Request(scope)

    # Mocking async `_receive()` that works in Python 3.7+
    side_effect = [_mock_receive(msg) for msg in JSON_RECEIVE_MESSAGES]
    starlette_request._receive = mock.Mock(side_effect=side_effect)

    extractor = StarletteRequestExtractor(starlette_request)

    request_info = await extractor.extract_request_info()

    assert request_info
    assert "cookies" not in request_info
    assert request_info["data"] == BODY_JSON


@pytest.mark.parametrize(
    "url,transaction_style,expected_transaction,expected_source",
    [
        (
            "/message",
            "url",
            "/message",
            "route",
        ),
        (
            "/message",
            "endpoint",
            "tests.integrations.starlette.test_starlette.starlette_app_factory.._message",
            "component",
        ),
        (
            "/message/123456",
            "url",
            "/message/{message_id}",
            "route",
        ),
        (
            "/message/123456",
            "endpoint",
            "tests.integrations.starlette.test_starlette.starlette_app_factory.._message_with_id",
            "component",
        ),
    ],
)
def test_transaction_style(
    sentry_init,
    capture_events,
    url,
    transaction_style,
    expected_transaction,
    expected_source,
):
    sentry_init(
        integrations=[StarletteIntegration(transaction_style=transaction_style)],
    )
    starlette_app = starlette_app_factory()

    events = capture_events()

    client = TestClient(starlette_app)
    client.get(url)

    (event,) = events
    assert event["transaction"] == expected_transaction
    assert event["transaction_info"] == {"source": expected_source}


@pytest.mark.parametrize(
    "test_url,expected_error,expected_message",
    [
        ("/some_url", ZeroDivisionError, "division by zero"),
        ("/custom_error", Exception, "Too Hot"),
    ],
)
def test_catch_exceptions(
    sentry_init,
    capture_exceptions,
    capture_events,
    test_url,
    expected_error,
    expected_message,
):
    sentry_init(integrations=[StarletteIntegration()])
    starlette_app = starlette_app_factory()
    exceptions = capture_exceptions()
    events = capture_events()

    client = TestClient(starlette_app)
    try:
        client.get(test_url)
    except Exception:
        pass

    (exc,) = exceptions
    assert isinstance(exc, expected_error)
    assert str(exc) == expected_message

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "starlette"


def test_user_information_error(sentry_init, capture_events):
    sentry_init(
        send_default_pii=True,
        integrations=[StarletteIntegration()],
    )
    starlette_app = starlette_app_factory(
        middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())]
    )
    events = capture_events()

    client = TestClient(starlette_app, raise_server_exceptions=False)
    try:
        client.get("/custom_error", auth=("Gabriela", "hello123"))
    except Exception:
        pass

    (event,) = events
    user = event.get("user", None)
    assert user
    assert "username" in user
    assert user["username"] == "Gabriela"


def test_user_information_error_no_pii(sentry_init, capture_events):
    sentry_init(
        send_default_pii=False,
        integrations=[StarletteIntegration()],
    )
    starlette_app = starlette_app_factory(
        middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())]
    )
    events = capture_events()

    client = TestClient(starlette_app, raise_server_exceptions=False)
    try:
        client.get("/custom_error", auth=("Gabriela", "hello123"))
    except Exception:
        pass

    (event,) = events
    assert "user" not in event


def test_user_information_transaction(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1.0,
        send_default_pii=True,
        integrations=[StarletteIntegration()],
    )
    starlette_app = starlette_app_factory(
        middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())]
    )
    events = capture_events()

    client = TestClient(starlette_app, raise_server_exceptions=False)
    client.get("/message", auth=("Gabriela", "hello123"))

    (_, transaction_event) = events
    user = transaction_event.get("user", None)
    assert user
    assert "username" in user
    assert user["username"] == "Gabriela"


def test_user_information_transaction_no_pii(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1.0,
        send_default_pii=False,
        integrations=[StarletteIntegration()],
    )
    starlette_app = starlette_app_factory(
        middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())]
    )
    events = capture_events()

    client = TestClient(starlette_app, raise_server_exceptions=False)
    client.get("/message", auth=("Gabriela", "hello123"))

    (_, transaction_event) = events
    assert "user" not in transaction_event


def test_middleware_spans(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[StarletteIntegration()],
    )
    starlette_app = starlette_app_factory(
        middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())]
    )
    events = capture_events()

    client = TestClient(starlette_app, raise_server_exceptions=False)
    try:
        client.get("/message", auth=("Gabriela", "hello123"))
    except Exception:
        pass

    (_, transaction_event) = events

    expected_middleware_spans = [
        "ServerErrorMiddleware",
        "AuthenticationMiddleware",
        "ExceptionMiddleware",
        "AuthenticationMiddleware",  # 'op': 'middleware.starlette.send'
        "ServerErrorMiddleware",  # 'op': 'middleware.starlette.send'
        "AuthenticationMiddleware",  # 'op': 'middleware.starlette.send'
        "ServerErrorMiddleware",  # 'op': 'middleware.starlette.send'
    ]

    assert len(transaction_event["spans"]) == len(expected_middleware_spans)

    idx = 0
    for span in transaction_event["spans"]:
        if span["op"].startswith("middleware.starlette"):
            assert (
                span["tags"]["starlette.middleware_name"]
                == expected_middleware_spans[idx]
            )
            idx += 1


def test_middleware_spans_disabled(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[StarletteIntegration(middleware_spans=False)],
    )
    starlette_app = starlette_app_factory(
        middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())]
    )
    events = capture_events()

    client = TestClient(starlette_app, raise_server_exceptions=False)
    try:
        client.get("/message", auth=("Gabriela", "hello123"))
    except Exception:
        pass

    (_, transaction_event) = events

    assert len(transaction_event["spans"]) == 0


def test_middleware_callback_spans(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[StarletteIntegration()],
    )
    starlette_app = starlette_app_factory(middleware=[Middleware(SampleMiddleware)])
    events = capture_events()

    client = TestClient(starlette_app, raise_server_exceptions=False)
    try:
        client.get("/message", auth=("Gabriela", "hello123"))
    except Exception:
        pass

    (_, transaction_event) = events

    expected = [
        {
            "op": "middleware.starlette",
            "description": "ServerErrorMiddleware",
            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
        },
        {
            "op": "middleware.starlette",
            "description": "SampleMiddleware",
            "tags": {"starlette.middleware_name": "SampleMiddleware"},
        },
        {
            "op": "middleware.starlette",
            "description": "ExceptionMiddleware",
            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
        },
        {
            "op": "middleware.starlette.send",
            "description": "SampleMiddleware.__call__..do_stuff",
            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
        },
        {
            "op": "middleware.starlette.send",
            "description": "ServerErrorMiddleware.__call__.._send",
            "tags": {"starlette.middleware_name": "SampleMiddleware"},
        },
        {
            "op": "middleware.starlette.send",
            "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send",
            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
        },
        {
            "op": "middleware.starlette.send",
            "description": "SampleMiddleware.__call__..do_stuff",
            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
        },
        {
            "op": "middleware.starlette.send",
            "description": "ServerErrorMiddleware.__call__.._send",
            "tags": {"starlette.middleware_name": "SampleMiddleware"},
        },
        {
            "op": "middleware.starlette.send",
            "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send",
            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
        },
    ]

    idx = 0
    for span in transaction_event["spans"]:
        assert span["op"] == expected[idx]["op"]
        assert span["description"] == expected[idx]["description"]
        assert span["tags"] == expected[idx]["tags"]
        idx += 1


def test_middleware_receive_send(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[StarletteIntegration()],
    )
    starlette_app = starlette_app_factory(
        middleware=[Middleware(SampleReceiveSendMiddleware)]
    )

    client = TestClient(starlette_app, raise_server_exceptions=False)
    try:
        # NOTE: the assert statements checking
        # for correct behaviour are in `SampleReceiveSendMiddleware`!
        client.get("/message", auth=("Gabriela", "hello123"))
    except Exception:
        pass


def test_middleware_partial_receive_send(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[StarletteIntegration()],
    )
    starlette_app = starlette_app_factory(
        middleware=[Middleware(SamplePartialReceiveSendMiddleware)]
    )
    events = capture_events()

    client = TestClient(starlette_app, raise_server_exceptions=False)
    try:
        client.get("/message", auth=("Gabriela", "hello123"))
    except Exception:
        pass

    (_, transaction_event) = events

    expected = [
        {
            "op": "middleware.starlette",
            "description": "ServerErrorMiddleware",
            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
        },
        {
            "op": "middleware.starlette",
            "description": "SamplePartialReceiveSendMiddleware",
            "tags": {"starlette.middleware_name": "SamplePartialReceiveSendMiddleware"},
        },
        {
            "op": "middleware.starlette.receive",
            "description": (
                "_ASGIAdapter.send..receive"
                if STARLETTE_VERSION < (0, 21)
                else "_TestClientTransport.handle_request..receive"
            ),
            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
        },
        {
            "op": "middleware.starlette.send",
            "description": "ServerErrorMiddleware.__call__.._send",
            "tags": {"starlette.middleware_name": "SamplePartialReceiveSendMiddleware"},
        },
        {
            "op": "middleware.starlette.send",
            "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send",
            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
        },
        {
            "op": "middleware.starlette",
            "description": "ExceptionMiddleware",
            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
        },
        {
            "op": "middleware.starlette.send",
            "description": "functools.partial(.my_send at ",
            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
        },
        {
            "op": "middleware.starlette.send",
            "description": "functools.partial(.my_send at ",
            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
        },
    ]

    idx = 0
    for span in transaction_event["spans"]:
        assert span["op"] == expected[idx]["op"]
        assert span["description"].startswith(expected[idx]["description"])
        assert span["tags"] == expected[idx]["tags"]
        idx += 1


def test_legacy_setup(
    sentry_init,
    capture_events,
):
    # Check that behaviour does not change
    # if the user just adds the new Integration
    # and forgets to remove SentryAsgiMiddleware
    sentry_init()
    app = starlette_app_factory()
    asgi_app = SentryAsgiMiddleware(app)

    events = capture_events()

    client = TestClient(asgi_app)
    client.get("/message/123456")

    (event,) = events
    assert event["transaction"] == "/message/{message_id}"


@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
@mock.patch("sentry_sdk.profiler.transaction_profiler.PROFILE_MINIMUM_SAMPLES", 0)
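# PROFILE_MINIMUM_SAMPLES is patched to 0 so that even this very short request
# produces a profile; otherwise the profiler would discard it for having too
# few samples.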
def test_active_thread_id(sentry_init, capture_envelopes, teardown_profiling, endpoint):
    sentry_init(
        traces_sample_rate=1.0,
        profiles_sample_rate=1.0,
    )
    app = starlette_app_factory()
    asgi_app = SentryAsgiMiddleware(app)

    envelopes = capture_envelopes()

    client = TestClient(asgi_app)
    response = client.get(endpoint)
    assert response.status_code == 200

    data = json.loads(response.content)

    envelopes = list(envelopes)
    assert len(envelopes) == 1

    profiles = [item for item in envelopes[0].items if item.type == "profile"]
    assert len(profiles) == 1

    for item in profiles:
        transactions = item.payload.json["transactions"]
        assert len(transactions) == 1
        assert str(data["active"]) == transactions[0]["active_thread_id"]

    transactions = [item for item in envelopes[0].items if item.type == "transaction"]
    assert len(transactions) == 1

    for item in transactions:
        transaction = item.payload.json
        trace_context = transaction["contexts"]["trace"]
        assert str(data["active"]) == trace_context["data"]["thread.id"]


def test_original_request_not_scrubbed(sentry_init, capture_events):
    sentry_init(integrations=[StarletteIntegration()])

    events = capture_events()

    async def _error(request):
        logging.critical("Oh no!")
        assert request.headers["Authorization"] == "Bearer ohno"
        assert await request.json() == {"password": "ohno"}
        return starlette.responses.JSONResponse({"status": "Oh no!"})

    app = starlette.applications.Starlette(
        routes=[
            starlette.routing.Route("/error", _error, methods=["POST"]),
        ],
    )

    client = TestClient(app)
    client.post(
        "/error",
        json={"password": "ohno"},
        headers={"Authorization": "Bearer ohno"},
    )

    event = events[0]
    assert event["request"]["data"] == {"password": "[Filtered]"}
    assert event["request"]["headers"]["authorization"] == "[Filtered]"


@pytest.mark.skipif(STARLETTE_VERSION < (0, 24), reason="Requires Starlette >= 0.24")
def test_template_tracing_meta(sentry_init, capture_events):
    sentry_init(
        auto_enabling_integrations=False,  # Make sure that the httpx integration is not added, because it adds tracing information to the starlette test client's request.
        integrations=[StarletteIntegration()],
    )
    events = capture_events()

    app = starlette_app_factory()

    client = TestClient(app)
    response = client.get("/render_template")
    assert response.status_code == 200

    rendered_meta = response.text
    traceparent, baggage = events[0]["message"].split("\n")
    assert traceparent != ""
    assert baggage != ""
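    # The endpoint captured the raw traceparent and baggage in a message (split
    # above); the rendered <meta> tags must carry exactly the same values.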

    match = re.match(
        r'^<meta name="sentry-trace" content="([^\"]*)"><meta name="baggage" content="([^\"]*)">',
        rendered_meta,
    )
    assert match is not None
    assert match.group(1) == traceparent

    rendered_baggage = match.group(2)
    assert rendered_baggage == baggage


@pytest.mark.parametrize(
    "request_url,transaction_style,expected_transaction_name,expected_transaction_source",
    [
        (
            "/message/123456",
            "endpoint",
            "tests.integrations.starlette.test_starlette.starlette_app_factory.._message_with_id",
            "component",
        ),
        (
            "/message/123456",
            "url",
            "/message/{message_id}",
            "route",
        ),
    ],
)
def test_transaction_name(
    sentry_init,
    request_url,
    transaction_style,
    expected_transaction_name,
    expected_transaction_source,
    capture_envelopes,
):
    """
    Tests that the transaction name reflects the configured transaction_style.
    """
    sentry_init(
        auto_enabling_integrations=False,  # Make sure that the httpx integration is not added, because it adds tracing information to the starlette test client's request.
        integrations=[StarletteIntegration(transaction_style=transaction_style)],
        traces_sample_rate=1.0,
    )

    envelopes = capture_envelopes()

    app = starlette_app_factory()
    client = TestClient(app)
    client.get(request_url)

    (_, transaction_envelope) = envelopes
    transaction_event = transaction_envelope.get_transaction_event()

    assert transaction_event["transaction"] == expected_transaction_name
    assert (
        transaction_event["transaction_info"]["source"] == expected_transaction_source
    )


@pytest.mark.parametrize(
    "request_url,transaction_style,expected_transaction_name,expected_transaction_source",
    [
        (
            "/message/123456",
            "endpoint",
            "http://testserver/message/123456",
            "url",
        ),
        (
            "/message/123456",
            "url",
            "http://testserver/message/123456",
            "url",
        ),
    ],
)
def test_transaction_name_in_traces_sampler(
    sentry_init,
    request_url,
    transaction_style,
    expected_transaction_name,
    expected_transaction_source,
):
    """
    Tests that a custom traces_sampler sees a meaningful transaction name.
    At sampling time the route is not resolved yet, so in both styles the
    name falls back to the raw URL.
    """

    def dummy_traces_sampler(sampling_context):
        assert (
            sampling_context["transaction_context"]["name"] == expected_transaction_name
        )
        assert (
            sampling_context["transaction_context"]["source"]
            == expected_transaction_source
        )

    sentry_init(
        auto_enabling_integrations=False,  # Make sure that the httpx integration is not added, because it adds tracing information to the starlette test client's request.
        integrations=[StarletteIntegration(transaction_style=transaction_style)],
        traces_sampler=dummy_traces_sampler,
        traces_sample_rate=1.0,
    )

    app = starlette_app_factory()
    client = TestClient(app)
    client.get(request_url)


@pytest.mark.parametrize(
    "request_url,transaction_style,expected_transaction_name,expected_transaction_source",
    [
        (
            "/message/123456",
            "endpoint",
            "starlette.middleware.trustedhost.TrustedHostMiddleware",
            "component",
        ),
        (
            "/message/123456",
            "url",
            "http://testserver/message/123456",
            "url",
        ),
    ],
)
def test_transaction_name_in_middleware(
    sentry_init,
    request_url,
    transaction_style,
    expected_transaction_name,
    expected_transaction_source,
    capture_envelopes,
):
    """
    Tests that the transaction name is still meaningful when the request is
    rejected by middleware (here TrustedHostMiddleware) before routing happens.
    """
    sentry_init(
        auto_enabling_integrations=False,  # Make sure that the httpx integration is not added, because it adds tracing information to the starlette test client's request.
        integrations=[
            StarletteIntegration(transaction_style=transaction_style),
        ],
        traces_sample_rate=1.0,
    )

    envelopes = capture_envelopes()

    middleware = [
        Middleware(
            TrustedHostMiddleware,
            allowed_hosts=["example.com", "*.example.com"],
        ),
    ]

    app = starlette_app_factory(middleware=middleware)
    client = TestClient(app)
    client.get(request_url)

    (transaction_envelope,) = envelopes
    transaction_event = transaction_envelope.get_transaction_event()

    assert transaction_event["contexts"]["response"]["status_code"] == 400
    assert transaction_event["transaction"] == expected_transaction_name
    assert (
        transaction_event["transaction_info"]["source"] == expected_transaction_source
    )


def test_span_origin(sentry_init, capture_events):
    sentry_init(
        integrations=[StarletteIntegration()],
        traces_sample_rate=1.0,
    )
    starlette_app = starlette_app_factory(
        middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())]
    )
    events = capture_events()

    client = TestClient(starlette_app, raise_server_exceptions=False)
    try:
        client.get("/message", auth=("Gabriela", "hello123"))
    except Exception:
        pass

    (_, event) = events

    assert event["contexts"]["trace"]["origin"] == "auto.http.starlette"
    for span in event["spans"]:
        assert span["origin"] == "auto.http.starlette"


class NonIterableContainer:
    """Wraps any container and makes it non-iterable.

    Used to test backwards compatibility with our old way of defining failed_request_status_codes, which allowed
    passing in a list of (possibly non-iterable) containers. The Python standard library does not provide any built-in
    non-iterable containers, so we have to define our own.
    """

    def __init__(self, inner):
        self.inner = inner

    def __contains__(self, item):
        return item in self.inner
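
# For illustration: membership checks work on a NonIterableContainer, but
# iterating it raises TypeError, since neither __iter__ nor __getitem__ is
# defined:
#
#     container = NonIterableContainer(range(500, 600))
#     assert 500 in container   # OK, delegates to __contains__
#     iter(container)           # raises TypeError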


parametrize_test_configurable_status_codes_deprecated = pytest.mark.parametrize(
    "failed_request_status_codes,status_code,expected_error",
    [
        (None, 500, True),
        (None, 400, False),
        ([500, 501], 500, True),
        ([500, 501], 401, False),
        ([range(400, 499)], 401, True),
        ([range(400, 499)], 500, False),
        ([range(400, 499), range(500, 599)], 300, False),
        ([range(400, 499), range(500, 599)], 403, True),
        ([range(400, 499), range(500, 599)], 503, True),
        ([range(400, 403), 500, 501], 401, True),
        ([range(400, 403), 500, 501], 405, False),
        ([range(400, 403), 500, 501], 501, True),
        ([range(400, 403), 500, 501], 503, False),
        ([], 500, False),
        ([NonIterableContainer(range(500, 600))], 500, True),
        ([NonIterableContainer(range(500, 600))], 404, False),
    ],
)
"""Test cases for configurable status codes (deprecated API).
Also used by the FastAPI tests.
"""


@parametrize_test_configurable_status_codes_deprecated
def test_configurable_status_codes_deprecated(
    sentry_init,
    capture_events,
    failed_request_status_codes,
    status_code,
    expected_error,
):
    with pytest.warns(DeprecationWarning):
        starlette_integration = StarletteIntegration(
            failed_request_status_codes=failed_request_status_codes
        )

    sentry_init(integrations=[starlette_integration])

    events = capture_events()

    async def _error(request):
        raise HTTPException(status_code)

    app = starlette.applications.Starlette(
        routes=[
            starlette.routing.Route("/error", _error, methods=["GET"]),
        ],
    )

    client = TestClient(app)
    client.get("/error")

    if expected_error:
        assert len(events) == 1
    else:
        assert not events


@pytest.mark.skipif(
    STARLETTE_VERSION < (0, 21),
    reason="Requires Starlette >= 0.21, because earlier versions do not support HTTP 'HEAD' requests",
)
def test_transaction_http_method_default(sentry_init, capture_events):
    """
    By default OPTIONS and HEAD requests do not create a transaction.
    """
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[
            StarletteIntegration(),
        ],
    )
    events = capture_events()

    starlette_app = starlette_app_factory()

    client = TestClient(starlette_app)
    client.get("/nomessage")
    client.options("/nomessage")
    client.head("/nomessage")

    assert len(events) == 1

    (event,) = events

    assert event["request"]["method"] == "GET"


@pytest.mark.skipif(
    STARLETTE_VERSION < (0, 21),
    reason="Requires Starlette >= 0.21, because earlier versions do not support HTTP 'HEAD' requests",
)
def test_transaction_http_method_custom(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[
            StarletteIntegration(
                http_methods_to_capture=(
                    "OPTIONS",
                    "head",
                ),  # capitalization does not matter
            ),
        ],
        debug=True,
    )
    events = capture_events()

    starlette_app = starlette_app_factory()

    client = TestClient(starlette_app)
    client.get("/nomessage")
    client.options("/nomessage")
    client.head("/nomessage")

    assert len(events) == 2

    (event1, event2) = events

    assert event1["request"]["method"] == "OPTIONS"
    assert event2["request"]["method"] == "HEAD"


parametrize_test_configurable_status_codes = pytest.mark.parametrize(
    ("failed_request_status_codes", "status_code", "expected_error"),
    (
        (None, 500, True),
        (None, 400, False),
        ({500, 501}, 500, True),
        ({500, 501}, 401, False),
        ({*range(400, 500)}, 401, True),
        ({*range(400, 500)}, 500, False),
        ({*range(400, 600)}, 300, False),
        ({*range(400, 600)}, 403, True),
        ({*range(400, 600)}, 503, True),
        ({*range(400, 403), 500, 501}, 401, True),
        ({*range(400, 403), 500, 501}, 405, False),
        ({*range(400, 403), 500, 501}, 501, True),
        ({*range(400, 403), 500, 501}, 503, False),
        (set(), 500, False),
    ),
)


@parametrize_test_configurable_status_codes
def test_configurable_status_codes(
    sentry_init,
    capture_events,
    failed_request_status_codes,
    status_code,
    expected_error,
):
    integration_kwargs = {}
    if failed_request_status_codes is not None:
        integration_kwargs["failed_request_status_codes"] = failed_request_status_codes

    with warnings.catch_warnings():
        warnings.simplefilter("error", DeprecationWarning)
        starlette_integration = StarletteIntegration(**integration_kwargs)

    sentry_init(integrations=[starlette_integration])

    events = capture_events()

    async def _error(_):
        raise HTTPException(status_code)

    app = starlette.applications.Starlette(
        routes=[
            starlette.routing.Route("/error", _error, methods=["GET"]),
        ],
    )

    client = TestClient(app)
    client.get("/error")

    assert len(events) == int(expected_error)
sentry-python-2.18.0/tests/integrations/starlite/__init__.py
import pytest

pytest.importorskip("starlite")
sentry-python-2.18.0/tests/integrations/starlite/test_starlite.py
from __future__ import annotations
import functools

import pytest

from sentry_sdk import capture_message
from sentry_sdk.integrations.starlite import StarliteIntegration

from typing import Any, Dict

from starlite import AbstractMiddleware, LoggingConfig, Starlite, get, Controller
from starlite.middleware import LoggingMiddlewareConfig, RateLimitConfig
from starlite.middleware.session.memory_backend import MemoryBackendConfig
from starlite.testing import TestClient


def starlite_app_factory(middleware=None, debug=True, exception_handlers=None):
    class MyController(Controller):
        path = "/controller"

        @get("/error")
        async def controller_error(self) -> None:
            raise Exception("Whoa")

    @get("/some_url")
    async def homepage_handler() -> "Dict[str, Any]":
        1 / 0
        return {"status": "ok"}

    @get("/custom_error", name="custom_name")
    async def custom_error() -> Any:
        raise Exception("Too Hot")

    @get("/message")
    async def message() -> "Dict[str, Any]":
        capture_message("hi")
        return {"status": "ok"}

    @get("/message/{message_id:str}")
    async def message_with_id() -> "Dict[str, Any]":
        capture_message("hi")
        return {"status": "ok"}

    logging_config = LoggingConfig()

    app = Starlite(
        route_handlers=[
            homepage_handler,
            custom_error,
            message,
            message_with_id,
            MyController,
        ],
        debug=debug,
        middleware=middleware,
        logging_config=logging_config,
        exception_handlers=exception_handlers,
    )

    return app


@pytest.mark.parametrize(
    "test_url,expected_error,expected_message,expected_tx_name",
    [
        (
            "/some_url",
            ZeroDivisionError,
            "division by zero",
            "tests.integrations.starlite.test_starlite.starlite_app_factory..homepage_handler",
        ),
        (
            "/custom_error",
            Exception,
            "Too Hot",
            "custom_name",
        ),
        (
            "/controller/error",
            Exception,
            "Whoa",
            "partial(.MyController.controller_error>)",
        ),
    ],
)
def test_catch_exceptions(
    sentry_init,
    capture_exceptions,
    capture_events,
    test_url,
    expected_error,
    expected_message,
    expected_tx_name,
):
    sentry_init(integrations=[StarliteIntegration()])
    starlite_app = starlite_app_factory()
    exceptions = capture_exceptions()
    events = capture_events()

    client = TestClient(starlite_app)
    try:
        client.get(test_url)
    except Exception:
        pass

    (exc,) = exceptions
    assert isinstance(exc, expected_error)
    assert str(exc) == expected_message

    (event,) = events
    assert event["transaction"] == expected_tx_name
    assert event["exception"]["values"][0]["mechanism"]["type"] == "starlite"


def test_middleware_spans(sentry_init, capture_events):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[StarliteIntegration()],
    )

    logging_config = LoggingMiddlewareConfig()
    session_config = MemoryBackendConfig()
    rate_limit_config = RateLimitConfig(rate_limit=("hour", 5))

    starlite_app = starlite_app_factory(
        middleware=[
            session_config.middleware,
            logging_config.middleware,
            rate_limit_config.middleware,
        ]
    )
    events = capture_events()

    client = TestClient(
        starlite_app, raise_server_exceptions=False, base_url="http://testserver.local"
    )
    client.get("/message")

    (_, transaction_event) = events

    expected = {"SessionMiddleware", "LoggingMiddleware", "RateLimitMiddleware"}
    found = set()

    starlite_spans = (
        span
        for span in transaction_event["spans"]
        if span["op"] == "middleware.starlite"
    )

    for span in starlite_spans:
        assert span["description"] in expected
        assert span["description"] not in found
        found.add(span["description"])
        assert span["description"] == span["tags"]["starlite.middleware_name"]


def test_middleware_callback_spans(sentry_init, capture_events):
    class SampleMiddleware(AbstractMiddleware):
        async def __call__(self, scope, receive, send) -> None:
            async def do_stuff(message):
                if message["type"] == "http.response.start":
                    # do something here.
                    pass
                await send(message)

            await self.app(scope, receive, do_stuff)

    sentry_init(
        traces_sample_rate=1.0,
        integrations=[StarliteIntegration()],
    )
    starlite_app = starlite_app_factory(middleware=[SampleMiddleware])
    events = capture_events()

    client = TestClient(starlite_app, raise_server_exceptions=False)
    client.get("/message")

    (_, transaction_events) = events

    expected_starlite_spans = [
        {
            "op": "middleware.starlite",
            "description": "SampleMiddleware",
            "tags": {"starlite.middleware_name": "SampleMiddleware"},
        },
        {
            "op": "middleware.starlite.send",
            "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send",
            "tags": {"starlite.middleware_name": "SampleMiddleware"},
        },
        {
            "op": "middleware.starlite.send",
            "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send",
            "tags": {"starlite.middleware_name": "SampleMiddleware"},
        },
    ]

    def is_matching_span(expected_span, actual_span):
        return (
            expected_span["op"] == actual_span["op"]
            and expected_span["description"] == actual_span["description"]
            and expected_span["tags"] == actual_span["tags"]
        )

    actual_starlite_spans = list(
        span
        for span in transaction_events["spans"]
        if "middleware.starlite" in span["op"]
    )
    assert len(actual_starlite_spans) == 3

    for expected_span in expected_starlite_spans:
        assert any(
            is_matching_span(expected_span, actual_span)
            for actual_span in actual_starlite_spans
        )


def test_middleware_receive_send(sentry_init, capture_events):
    class SampleReceiveSendMiddleware(AbstractMiddleware):
        async def __call__(self, scope, receive, send):
            message = await receive()
            assert message
            assert message["type"] == "http.request"

            send_output = await send({"type": "something-unimportant"})
            assert send_output is None

            await self.app(scope, receive, send)

    sentry_init(
        traces_sample_rate=1.0,
        integrations=[StarliteIntegration()],
    )
    starlite_app = starlite_app_factory(middleware=[SampleReceiveSendMiddleware])

    client = TestClient(starlite_app, raise_server_exceptions=False)
    # See SampleReceiveSendMiddleware.__call__ above for assertions of correct behavior
    client.get("/message")


def test_middleware_partial_receive_send(sentry_init, capture_events):
    class SamplePartialReceiveSendMiddleware(AbstractMiddleware):
        async def __call__(self, scope, receive, send):
            message = await receive()
            assert message
            assert message["type"] == "http.request"

            send_output = await send({"type": "something-unimportant"})
            assert send_output is None

            async def my_receive(*args, **kwargs):
                pass

            async def my_send(*args, **kwargs):
                pass

            partial_receive = functools.partial(my_receive)
            partial_send = functools.partial(my_send)

            await self.app(scope, partial_receive, partial_send)

    sentry_init(
        traces_sample_rate=1.0,
        integrations=[StarliteIntegration()],
    )
    starlite_app = starlite_app_factory(middleware=[SamplePartialReceiveSendMiddleware])
    events = capture_events()

    client = TestClient(starlite_app, raise_server_exceptions=False)
    # See SamplePartialReceiveSendMiddleware.__call__ above for assertions of correct behavior
    client.get("/message")

    (_, transaction_events) = events

    expected_starlite_spans = [
        {
            "op": "middleware.starlite",
            "description": "SamplePartialReceiveSendMiddleware",
            "tags": {"starlite.middleware_name": "SamplePartialReceiveSendMiddleware"},
        },
        {
            "op": "middleware.starlite.receive",
            "description": "TestClientTransport.create_receive..receive",
            "tags": {"starlite.middleware_name": "SamplePartialReceiveSendMiddleware"},
        },
        {
            "op": "middleware.starlite.send",
            "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send",
            "tags": {"starlite.middleware_name": "SamplePartialReceiveSendMiddleware"},
        },
    ]

    def is_matching_span(expected_span, actual_span):
        return (
            expected_span["op"] == actual_span["op"]
            and actual_span["description"].startswith(expected_span["description"])
            and expected_span["tags"] == actual_span["tags"]
        )

    actual_starlite_spans = list(
        span
        for span in transaction_events["spans"]
        if "middleware.starlite" in span["op"]
    )
    assert len(actual_starlite_spans) == 3

    for expected_span in expected_starlite_spans:
        assert any(
            is_matching_span(expected_span, actual_span)
            for actual_span in actual_starlite_spans
        )


def test_span_origin(sentry_init, capture_events):
    sentry_init(
        integrations=[StarliteIntegration()],
        traces_sample_rate=1.0,
    )

    logging_config = LoggingMiddlewareConfig()
    session_config = MemoryBackendConfig()
    rate_limit_config = RateLimitConfig(rate_limit=("hour", 5))

    starlite_app = starlite_app_factory(
        middleware=[
            session_config.middleware,
            logging_config.middleware,
            rate_limit_config.middleware,
        ]
    )
    events = capture_events()

    client = TestClient(
        starlite_app, raise_server_exceptions=False, base_url="http://testserver.local"
    )
    client.get("/message")

    (_, event) = events

    assert event["contexts"]["trace"]["origin"] == "auto.http.starlite"
    for span in event["spans"]:
        assert span["origin"] == "auto.http.starlite"


@pytest.mark.parametrize(
    "is_send_default_pii",
    [
        True,
        False,
    ],
    ids=[
        "send_default_pii=True",
        "send_default_pii=False",
    ],
)
def test_starlite_scope_user_on_exception_event(
    sentry_init, capture_exceptions, capture_events, is_send_default_pii
):
    class TestUserMiddleware(AbstractMiddleware):
        async def __call__(self, scope, receive, send):
            scope["user"] = {
                "email": "lennon@thebeatles.com",
                "username": "john",
                "id": "1",
            }
            await self.app(scope, receive, send)

    sentry_init(
        integrations=[StarliteIntegration()], send_default_pii=is_send_default_pii
    )
    starlite_app = starlite_app_factory(middleware=[TestUserMiddleware])
    exceptions = capture_exceptions()
    events = capture_events()

    # This request intentionally raises an exception
    client = TestClient(starlite_app)
    try:
        client.get("/some_url")
    except Exception:
        pass

    assert len(exceptions) == 1
    assert len(events) == 1
    (event,) = events

    if is_send_default_pii:
        assert "user" in event
        assert event["user"] == {
            "email": "lennon@thebeatles.com",
            "username": "john",
            "id": "1",
        }
    else:
        assert "user" not in event
sentry-python-2.18.0/tests/integrations/stdlib/test_httplib.py
import random
from http.client import HTTPConnection, HTTPSConnection
from socket import SocketIO
from urllib.request import urlopen
from unittest import mock

import pytest

from sentry_sdk import capture_message, start_transaction
from sentry_sdk.consts import MATCH_ALL, SPANDATA
from sentry_sdk.tracing import Transaction
from sentry_sdk.integrations.stdlib import StdlibIntegration

from tests.conftest import ApproxDict, create_mock_http_server

PORT = create_mock_http_server()
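# create_mock_http_server (tests/conftest.py) starts a local HTTP server that
# responds 200 OK, so the requests below never leave localhost.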


def test_crumb_capture(sentry_init, capture_events):
    sentry_init(integrations=[StdlibIntegration()])
    events = capture_events()

    url = "http://localhost:{}/some/random/url".format(PORT)
    urlopen(url)

    capture_message("Testing!")

    (event,) = events
    (crumb,) = event["breadcrumbs"]["values"]

    assert crumb["type"] == "http"
    assert crumb["category"] == "httplib"
    assert crumb["data"] == ApproxDict(
        {
            "url": url,
            SPANDATA.HTTP_METHOD: "GET",
            SPANDATA.HTTP_STATUS_CODE: 200,
            "reason": "OK",
            SPANDATA.HTTP_FRAGMENT: "",
            SPANDATA.HTTP_QUERY: "",
        }
    )


def test_crumb_capture_hint(sentry_init, capture_events):
    def before_breadcrumb(crumb, hint):
        crumb["data"]["extra"] = "foo"
        return crumb

    sentry_init(integrations=[StdlibIntegration()], before_breadcrumb=before_breadcrumb)
    events = capture_events()

    url = "http://localhost:{}/some/random/url".format(PORT)
    urlopen(url)

    capture_message("Testing!")

    (event,) = events
    (crumb,) = event["breadcrumbs"]["values"]
    assert crumb["type"] == "http"
    assert crumb["category"] == "httplib"
    assert crumb["data"] == ApproxDict(
        {
            "url": url,
            SPANDATA.HTTP_METHOD: "GET",
            SPANDATA.HTTP_STATUS_CODE: 200,
            "reason": "OK",
            "extra": "foo",
            SPANDATA.HTTP_FRAGMENT: "",
            SPANDATA.HTTP_QUERY: "",
        }
    )


def test_empty_realurl(sentry_init):
    """
    Ensure that after calling sentry_sdk.init you can still call
    putrequest with a None url.
    """

    sentry_init(dsn="")
    HTTPConnection("example.com", port=443).putrequest("POST", None)


def test_httplib_misuse(sentry_init, capture_events, request):
    """HTTPConnection.getresponse must be called after every call to
    HTTPConnection.request. However, if somebody does not abide by
    this contract, we should still handle it gracefully and not
    send mixed breadcrumbs.

    Test whether our breadcrumbs are coherent when somebody uses HTTPConnection
    wrongly.
    """

    sentry_init()
    events = capture_events()

    conn = HTTPConnection("localhost", PORT)

    # make sure we release the resource, even if the test fails
    request.addfinalizer(conn.close)

    conn.request("GET", "/200")

    with pytest.raises(Exception):  # noqa: B017
        # This raises an exception, because we didn't call `getresponse` for
        # the previous request yet.
        #
        # This call should not affect our breadcrumb.
        conn.request("POST", "/200")

    response = conn.getresponse()
    assert response._method == "GET"

    capture_message("Testing!")

    (event,) = events
    (crumb,) = event["breadcrumbs"]["values"]

    assert crumb["type"] == "http"
    assert crumb["category"] == "httplib"
    assert crumb["data"] == ApproxDict(
        {
            "url": "http://localhost:{}/200".format(PORT),
            SPANDATA.HTTP_METHOD: "GET",
            SPANDATA.HTTP_STATUS_CODE: 200,
            "reason": "OK",
            SPANDATA.HTTP_FRAGMENT: "",
            SPANDATA.HTTP_QUERY: "",
        }
    )


def test_outgoing_trace_headers(sentry_init, monkeypatch):
    # HTTPSConnection.send is passed a string containing (among other things)
    # the headers on the request. Mock it so we can check the headers, and also
    # so it doesn't try to actually talk to the internet.
    mock_send = mock.Mock()
    monkeypatch.setattr(HTTPSConnection, "send", mock_send)

    sentry_init(traces_sample_rate=1.0)

    headers = {}
    headers["baggage"] = (
        "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, "
        "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, "
        "sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;"
    )

    transaction = Transaction.continue_from_headers(headers)

    with start_transaction(
        transaction=transaction,
        name="/interactions/other-dogs/new-dog",
        op="greeting.sniff",
        trace_id="12312012123120121231201212312012",
    ) as transaction:
        HTTPSConnection("www.squirrelchasers.com").request("GET", "/top-chasers")

        (request_str,) = mock_send.call_args[0]
        request_headers = {}
        for line in request_str.decode("utf-8").split("\r\n")[1:]:
            if line:
                key, val = line.split(": ")
                request_headers[key] = val

        request_span = transaction._span_recorder.spans[-1]
        expected_sentry_trace = "{trace_id}-{parent_span_id}-{sampled}".format(
            trace_id=transaction.trace_id,
            parent_span_id=request_span.span_id,
            sampled=1,
        )
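        # The sentry-trace header has the form "<trace_id>-<span_id>-<sampled>",
        # where the span id is that of the http.client span for this request.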
        assert request_headers["sentry-trace"] == expected_sentry_trace

        expected_outgoing_baggage = (
            "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
            "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
            "sentry-sample_rate=0.01337,"
            "sentry-user_id=Am%C3%A9lie"
        )
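        # Only the sentry-* entries of the incoming baggage are propagated; the
        # third-party "other-vendor-*" entries are dropped from the outgoing
        # header.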

        assert request_headers["baggage"] == expected_outgoing_baggage


def test_outgoing_trace_headers_head_sdk(sentry_init, monkeypatch):
    # HTTPSConnection.send is passed a string containing (among other things)
    # the headers on the request. Mock it so we can check the headers, and also
    # so it doesn't try to actually talk to the internet.
    mock_send = mock.Mock()
    monkeypatch.setattr(HTTPSConnection, "send", mock_send)

    # make sure transaction is always sampled
    monkeypatch.setattr(random, "random", lambda: 0.1)

    sentry_init(traces_sample_rate=0.5, release="foo")
    transaction = Transaction.continue_from_headers({})

    with start_transaction(transaction=transaction, name="Head SDK tx") as transaction:
        HTTPSConnection("www.squirrelchasers.com").request("GET", "/top-chasers")

        (request_str,) = mock_send.call_args[0]
        request_headers = {}
        for line in request_str.decode("utf-8").split("\r\n")[1:]:
            if line:
                key, val = line.split(": ")
                request_headers[key] = val

        request_span = transaction._span_recorder.spans[-1]
        expected_sentry_trace = "{trace_id}-{parent_span_id}-{sampled}".format(
            trace_id=transaction.trace_id,
            parent_span_id=request_span.span_id,
            sampled=1,
        )
        assert request_headers["sentry-trace"] == expected_sentry_trace

        expected_outgoing_baggage = (
            "sentry-trace_id=%s,"
            "sentry-environment=production,"
            "sentry-release=foo,"
            "sentry-sample_rate=0.5,"
            "sentry-sampled=%s"
        ) % (transaction.trace_id, "true" if transaction.sampled else "false")

        assert request_headers["baggage"] == expected_outgoing_baggage


@pytest.mark.parametrize(
    "trace_propagation_targets,host,path,trace_propagated",
    [
        [
            [],
            "example.com",
            "/",
            False,
        ],
        [
            None,
            "example.com",
            "/",
            False,
        ],
        [
            [MATCH_ALL],
            "example.com",
            "/",
            True,
        ],
        [
            ["https://example.com/"],
            "example.com",
            "/",
            True,
        ],
        [
            ["https://example.com/"],
            "example.com",
            "",
            False,
        ],
        [
            ["https://example.com"],
            "example.com",
            "",
            True,
        ],
        [
            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
            "example.net",
            "",
            False,
        ],
        [
            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
            "good.example.net",
            "",
            True,
        ],
        [
            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
            "good.example.net",
            "/some/thing",
            True,
        ],
    ],
)
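# As the cases above show, each target is matched against the full outgoing URL
# (scheme included), either as a plain substring or as a regular expression; an
# empty list, as well as None, attaches no trace headers at all.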
def test_option_trace_propagation_targets(
    sentry_init, monkeypatch, trace_propagation_targets, host, path, trace_propagated
):
    # HTTPSConnection.send is passed a string containing (among other things)
    # the headers on the request. Mock it so we can check the headers, and also
    # so it doesn't try to actually talk to the internet.
    mock_send = mock.Mock()
    monkeypatch.setattr(HTTPSConnection, "send", mock_send)

    sentry_init(
        trace_propagation_targets=trace_propagation_targets,
        traces_sample_rate=1.0,
    )

    headers = {
        "baggage": (
            "sentry-trace_id=771a43a4192642f0b136d5159a501700, "
            "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, "
        )
    }

    transaction = Transaction.continue_from_headers(headers)

    with start_transaction(
        transaction=transaction,
        name="/interactions/other-dogs/new-dog",
        op="greeting.sniff",
        trace_id="12312012123120121231201212312012",
    ) as transaction:
        HTTPSConnection(host).request("GET", path)

        (request_str,) = mock_send.call_args[0]
        request_headers = {}
        for line in request_str.decode("utf-8").split("\r\n")[1:]:
            if line:
                key, val = line.split(": ")
                request_headers[key] = val

        if trace_propagated:
            assert "sentry-trace" in request_headers
            assert "baggage" in request_headers
        else:
            assert "sentry-trace" not in request_headers
            assert "baggage" not in request_headers


def test_span_origin(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0, debug=True)
    events = capture_events()

    with start_transaction(name="foo"):
        conn = HTTPSConnection("example.com")
        conn.request("GET", "/foo")
        conn.getresponse()

    (event,) = events
    assert event["contexts"]["trace"]["origin"] == "manual"

    assert event["spans"][0]["op"] == "http.client"
    assert event["spans"][0]["origin"] == "auto.http.stdlib.httplib"


def test_http_timeout(monkeypatch, sentry_init, capture_envelopes):
    mock_readinto = mock.Mock(side_effect=TimeoutError)
    monkeypatch.setattr(SocketIO, "readinto", mock_readinto)

    sentry_init(traces_sample_rate=1.0)

    envelopes = capture_envelopes()

    with start_transaction(op="op", name="name"):
        try:
            conn = HTTPSConnection("www.squirrelchasers.com")
            conn.request("GET", "/top-chasers")
            conn.getresponse()
        except Exception:
            pass

    items = [
        item
        for envelope in envelopes
        for item in envelope.items
        if item.type == "transaction"
    ]
    assert len(items) == 1

    transaction = items[0].payload.json
    assert len(transaction["spans"]) == 1

    span = transaction["spans"][0]
    assert span["op"] == "http.client"
    assert span["description"] == "GET https://www.squirrelchasers.com/top-chasers"
sentry-python-2.18.0/tests/integrations/stdlib/test_subprocess.py
import os
import platform
import subprocess
import sys
from collections.abc import Mapping

import pytest

from sentry_sdk import capture_message, start_transaction
from sentry_sdk.integrations.stdlib import StdlibIntegration
from tests.conftest import ApproxDict


class ImmutableDict(Mapping):
    def __init__(self, inner):
        self.inner = inner

    def __getitem__(self, key):
        return self.inner[key]

    def __iter__(self):
        return iter(self.inner)

    def __len__(self):
        return len(self.inner)


@pytest.mark.parametrize("positional_args", [True, False])
@pytest.mark.parametrize(
    "iterator",
    [
        pytest.param(
            True,
            marks=pytest.mark.skipif(
                platform.python_implementation() == "PyPy",
                reason="https://bitbucket.org/pypy/pypy/issues/3050/subprocesspopen-only-accepts-sequences",
            ),
        ),
        False,
    ],
    ids=("as_iterator", "as_list"),
)
@pytest.mark.parametrize("env_mapping", [None, os.environ, ImmutableDict(os.environ)])
@pytest.mark.parametrize("with_cwd", [True, False])
def test_subprocess_basic(
    sentry_init,
    capture_events,
    monkeypatch,
    positional_args,
    iterator,
    env_mapping,
    with_cwd,
):
    monkeypatch.setenv("FOO", "bar")

    old_environ = dict(os.environ)

    sentry_init(integrations=[StdlibIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    with start_transaction(name="foo") as transaction:
        args = [
            sys.executable,
            "-c",
            "import os; "
            "import sentry_sdk; "
            "from sentry_sdk.integrations.stdlib import get_subprocess_traceparent_headers; "
            "sentry_sdk.init(); "
            "assert os.environ['FOO'] == 'bar'; "
            "print(dict(get_subprocess_traceparent_headers()))",
        ]

        if iterator:
            args = iter(args)

        if positional_args:
            a = (
                args,
                0,  # bufsize
                None,  # executable
                None,  # stdin
                subprocess.PIPE,  # stdout
                None,  # stderr
                None,  # preexec_fn
                False,  # close_fds
                False,  # shell
                os.getcwd() if with_cwd else None,  # cwd
            )

            if env_mapping is not None:
                a += (env_mapping,)

            popen = subprocess.Popen(*a)

        else:
            kw = {"args": args, "stdout": subprocess.PIPE}

            if with_cwd:
                kw["cwd"] = os.getcwd()

            if env_mapping is not None:
                kw["env"] = env_mapping

            popen = subprocess.Popen(**kw)

        output, unused_err = popen.communicate()
        retcode = popen.poll()
        assert not retcode

    assert os.environ == old_environ

    assert transaction.trace_id in str(output)

    capture_message("hi")

    (
        transaction_event,
        message_event,
    ) = events

    assert message_event["message"] == "hi"

    data = ApproxDict({"subprocess.cwd": os.getcwd()} if with_cwd else {})

    (crumb,) = message_event["breadcrumbs"]["values"]
    assert crumb == {
        "category": "subprocess",
        "data": data,
        "message": crumb["message"],
        "timestamp": crumb["timestamp"],
        "type": "subprocess",
    }

    if not iterator:
        assert crumb["message"].startswith(sys.executable + " ")

    assert transaction_event["type"] == "transaction"

    (
        subprocess_init_span,
        subprocess_communicate_span,
        subprocess_wait_span,
    ) = transaction_event["spans"]

    assert (
        subprocess_init_span["op"],
        subprocess_communicate_span["op"],
        subprocess_wait_span["op"],
    ) == ("subprocess", "subprocess.communicate", "subprocess.wait")

    # span hierarchy
    assert (
        subprocess_wait_span["parent_span_id"] == subprocess_communicate_span["span_id"]
    )
    assert (
        subprocess_communicate_span["parent_span_id"]
        == subprocess_init_span["parent_span_id"]
        == transaction_event["contexts"]["trace"]["span_id"]
    )

    # common data
    assert (
        subprocess_init_span["tags"]["subprocess.pid"]
        == subprocess_wait_span["tags"]["subprocess.pid"]
        == subprocess_communicate_span["tags"]["subprocess.pid"]
    )

    # data of init span
    assert subprocess_init_span.get("data", {}) == data
    if iterator:
        assert "iterator" in subprocess_init_span["description"]
        assert subprocess_init_span["description"].startswith("<")
    else:
        assert sys.executable + " -c" in subprocess_init_span["description"]


def test_subprocess_empty_env(sentry_init, monkeypatch):
    monkeypatch.setenv("TEST_MARKER", "should_not_be_seen")
    sentry_init(integrations=[StdlibIntegration()], traces_sample_rate=1.0)
    with start_transaction(name="foo"):
        args = [
            sys.executable,
            "-c",
            "import os; print(os.environ.get('TEST_MARKER', None))",
        ]
        output = subprocess.check_output(args, env={}, universal_newlines=True)
    assert "should_not_be_seen" not in output


def test_subprocess_invalid_args(sentry_init):
    sentry_init(integrations=[StdlibIntegration()])

    with pytest.raises(TypeError) as excinfo:
        subprocess.Popen(1)

    assert "'int' object is not iterable" in str(excinfo.value)


def test_subprocess_span_origin(sentry_init, capture_events):
    sentry_init(integrations=[StdlibIntegration()], traces_sample_rate=1.0)
    events = capture_events()

    with start_transaction(name="foo"):
        args = [
            sys.executable,
            "-c",
            "print('hello world')",
        ]
        kw = {"args": args, "stdout": subprocess.PIPE}

        popen = subprocess.Popen(**kw)
        popen.communicate()
        popen.poll()

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"

    assert event["spans"][0]["op"] == "subprocess"
    assert event["spans"][0]["origin"] == "auto.subprocess.stdlib.subprocess"

    assert event["spans"][1]["op"] == "subprocess.communicate"
    assert event["spans"][1]["origin"] == "auto.subprocess.stdlib.subprocess"

    assert event["spans"][2]["op"] == "subprocess.wait"
    assert event["spans"][2]["origin"] == "auto.subprocess.stdlib.subprocess"
sentry-python-2.18.0/tests/integrations/strawberry/__init__.py
sentry-python-2.18.0/tests/integrations/strawberry/test_strawberry.py
import pytest
from typing import AsyncGenerator, Optional

strawberry = pytest.importorskip("strawberry")
pytest.importorskip("fastapi")
pytest.importorskip("flask")

from unittest import mock

from fastapi import FastAPI
from fastapi.testclient import TestClient
from flask import Flask
from strawberry.fastapi import GraphQLRouter
from strawberry.flask.views import GraphQLView

from sentry_sdk.consts import OP
from sentry_sdk.integrations.fastapi import FastApiIntegration
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.starlette import StarletteIntegration
from sentry_sdk.integrations.strawberry import (
    StrawberryIntegration,
    SentryAsyncExtension,
    SentrySyncExtension,
)
from tests.conftest import ApproxDict

try:
    from strawberry.extensions.tracing import (
        SentryTracingExtension,
        SentryTracingExtensionSync,
    )
except ImportError:
    SentryTracingExtension = None
    SentryTracingExtensionSync = None

parameterize_strawberry_test = pytest.mark.parametrize(
    "client_factory,async_execution,framework_integrations",
    (
        (
            "async_app_client_factory",
            True,
            [FastApiIntegration(), StarletteIntegration()],
        ),
        ("sync_app_client_factory", False, [FlaskIntegration()]),
    ),
)
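# "client_factory" is the name of a fixture; each test resolves it at runtime
# via request.getfixturevalue(client_factory), so the same test body runs
# against both the FastAPI (async) and Flask (sync) apps.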


@strawberry.type
class Query:
    @strawberry.field
    def hello(self) -> str:
        return "Hello World"

    @strawberry.field
    def error(self) -> int:
        return 1 / 0


@strawberry.type
class Mutation:
    @strawberry.mutation
    def change(self, attribute: str) -> str:
        return attribute


@strawberry.type
class Message:
    content: str


@strawberry.type
class Subscription:
    @strawberry.subscription
    async def message_added(self) -> Optional[AsyncGenerator[Message, None]]:
        message = Message(content="Hello, world!")
        yield message


@pytest.fixture
def async_app_client_factory():
    def create_app(schema):
        async_app = FastAPI()
        async_app.include_router(GraphQLRouter(schema), prefix="/graphql")
        return TestClient(async_app)

    return create_app


@pytest.fixture
def sync_app_client_factory():
    def create_app(schema):
        sync_app = Flask(__name__)
        sync_app.add_url_rule(
            "/graphql",
            view_func=GraphQLView.as_view("graphql_view", schema=schema),
        )
        return sync_app.test_client()

    return create_app


def test_async_execution_uses_async_extension(sentry_init):
    sentry_init(integrations=[StrawberryIntegration(async_execution=True)])

    with mock.patch(
        "sentry_sdk.integrations.strawberry._get_installed_modules",
        return_value={"flask": "2.3.3"},
    ):
        # actual installed modules should not matter, the explicit option takes
        # precedence
        schema = strawberry.Schema(Query)
        assert SentryAsyncExtension in schema.extensions


def test_sync_execution_uses_sync_extension(sentry_init):
    sentry_init(integrations=[StrawberryIntegration(async_execution=False)])

    with mock.patch(
        "sentry_sdk.integrations.strawberry._get_installed_modules",
        return_value={"fastapi": "0.103.1", "starlette": "0.27.0"},
    ):
        # actual installed modules should not matter, the explicit option takes
        # precedence
        schema = strawberry.Schema(Query)
        assert SentrySyncExtension in schema.extensions


def test_infer_execution_type_from_installed_packages_async(sentry_init):
    sentry_init(integrations=[StrawberryIntegration()])

    with mock.patch(
        "sentry_sdk.integrations.strawberry._get_installed_modules",
        return_value={"fastapi": "0.103.1", "starlette": "0.27.0"},
    ):
        schema = strawberry.Schema(Query)
        assert SentryAsyncExtension in schema.extensions


def test_infer_execution_type_from_installed_packages_sync(sentry_init):
    sentry_init(integrations=[StrawberryIntegration()])

    with mock.patch(
        "sentry_sdk.integrations.strawberry._get_installed_modules",
        return_value={"flask": "2.3.3"},
    ):
        schema = strawberry.Schema(Query)
        assert SentrySyncExtension in schema.extensions


@pytest.mark.skipif(
    SentryTracingExtension is None,
    reason="SentryTracingExtension no longer available in this Strawberry version",
)
def test_replace_existing_sentry_async_extension(sentry_init):
    sentry_init(integrations=[StrawberryIntegration()])

    schema = strawberry.Schema(Query, extensions=[SentryTracingExtension])
    assert SentryTracingExtension not in schema.extensions
    assert SentrySyncExtension not in schema.extensions
    assert SentryAsyncExtension in schema.extensions


@pytest.mark.skipif(
    SentryTracingExtensionSync is None,
    reason="SentryTracingExtensionSync no longer available in this Strawberry version",
)
def test_replace_existing_sentry_sync_extension(sentry_init):
    sentry_init(integrations=[StrawberryIntegration()])

    schema = strawberry.Schema(Query, extensions=[SentryTracingExtensionSync])
    assert SentryTracingExtensionSync not in schema.extensions
    assert SentryAsyncExtension not in schema.extensions
    assert SentrySyncExtension in schema.extensions


@parameterize_strawberry_test
def test_capture_request_if_available_and_send_pii_is_on(
    request,
    sentry_init,
    capture_events,
    client_factory,
    async_execution,
    framework_integrations,
):
    sentry_init(
        send_default_pii=True,
        integrations=[
            StrawberryIntegration(async_execution=async_execution),
        ]
        + framework_integrations,
    )
    events = capture_events()

    schema = strawberry.Schema(Query)

    client_factory = request.getfixturevalue(client_factory)
    client = client_factory(schema)

    query = "query ErrorQuery { error }"
    client.post("/graphql", json={"query": query, "operationName": "ErrorQuery"})

    assert len(events) == 1

    (error_event,) = events

    assert error_event["exception"]["values"][0]["mechanism"]["type"] == "strawberry"
    assert error_event["request"]["api_target"] == "graphql"
    assert error_event["request"]["data"] == {
        "query": query,
        "operationName": "ErrorQuery",
    }
    assert error_event["contexts"]["response"] == {
        "data": {
            "data": None,
            "errors": [
                {
                    "message": "division by zero",
                    "locations": [{"line": 1, "column": 20}],
                    "path": ["error"],
                }
            ],
        }
    }
    assert len(error_event["breadcrumbs"]["values"]) == 1
    assert error_event["breadcrumbs"]["values"][0]["category"] == "graphql.operation"
    assert error_event["breadcrumbs"]["values"][0]["data"] == {
        "operation_name": "ErrorQuery",
        "operation_type": "query",
    }


@parameterize_strawberry_test
def test_do_not_capture_request_if_send_pii_is_off(
    request,
    sentry_init,
    capture_events,
    client_factory,
    async_execution,
    framework_integrations,
):
    sentry_init(
        integrations=[
            StrawberryIntegration(async_execution=async_execution),
        ]
        + framework_integrations,
    )
    events = capture_events()

    schema = strawberry.Schema(Query)

    client_factory = request.getfixturevalue(client_factory)
    client = client_factory(schema)

    query = "query ErrorQuery { error }"
    client.post("/graphql", json={"query": query, "operationName": "ErrorQuery"})

    assert len(events) == 1

    (error_event,) = events
    assert error_event["exception"]["values"][0]["mechanism"]["type"] == "strawberry"
    assert "data" not in error_event["request"]
    assert "response" not in error_event["contexts"]

    assert len(error_event["breadcrumbs"]["values"]) == 1
    assert error_event["breadcrumbs"]["values"][0]["category"] == "graphql.operation"
    assert error_event["breadcrumbs"]["values"][0]["data"] == {
        "operation_name": "ErrorQuery",
        "operation_type": "query",
    }


@parameterize_strawberry_test
def test_breadcrumb_no_operation_name(
    request,
    sentry_init,
    capture_events,
    client_factory,
    async_execution,
    framework_integrations,
):
    sentry_init(
        integrations=[
            StrawberryIntegration(async_execution=async_execution),
        ]
        + framework_integrations,
    )
    events = capture_events()

    schema = strawberry.Schema(Query)

    client_factory = request.getfixturevalue(client_factory)
    client = client_factory(schema)

    query = "{ error }"
    client.post("/graphql", json={"query": query})

    assert len(events) == 1

    (error_event,) = events

    assert len(error_event["breadcrumbs"]["values"]) == 1
    assert error_event["breadcrumbs"]["values"][0]["category"] == "graphql.operation"
    assert error_event["breadcrumbs"]["values"][0]["data"] == {
        "operation_name": None,
        "operation_type": "query",
    }


@parameterize_strawberry_test
def test_capture_transaction_on_error(
    request,
    sentry_init,
    capture_events,
    client_factory,
    async_execution,
    framework_integrations,
):
    sentry_init(
        send_default_pii=True,
        integrations=[
            StrawberryIntegration(async_execution=async_execution),
        ]
        + framework_integrations,
        traces_sample_rate=1,
    )
    events = capture_events()

    schema = strawberry.Schema(Query)

    client_factory = request.getfixturevalue(client_factory)
    client = client_factory(schema)

    query = "query ErrorQuery { error }"
    client.post("/graphql", json={"query": query, "operationName": "ErrorQuery"})

    assert len(events) == 2
    (_, transaction_event) = events

    assert transaction_event["transaction"] == "ErrorQuery"
    assert transaction_event["contexts"]["trace"]["op"] == OP.GRAPHQL_QUERY
    assert transaction_event["spans"]

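    # The integration emits one graphql.query span with the parse, validate and
    # resolve spans as its direct children; the parent_span_id assertions below
    # check that hierarchy.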
    query_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_QUERY
    ]
    assert len(query_spans) == 1, "exactly one query span expected"
    query_span = query_spans[0]
    assert query_span["description"] == "query ErrorQuery"
    assert query_span["data"]["graphql.operation.type"] == "query"
    assert query_span["data"]["graphql.operation.name"] == "ErrorQuery"
    assert query_span["data"]["graphql.document"] == query
    assert query_span["data"]["graphql.resource_name"]

    parse_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_PARSE
    ]
    assert len(parse_spans) == 1, "exactly one parse span expected"
    parse_span = parse_spans[0]
    assert parse_span["parent_span_id"] == query_span["span_id"]
    assert parse_span["description"] == "parsing"

    validate_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_VALIDATE
    ]
    assert len(validate_spans) == 1, "exactly one validate span expected"
    validate_span = validate_spans[0]
    assert validate_span["parent_span_id"] == query_span["span_id"]
    assert validate_span["description"] == "validation"

    resolve_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_RESOLVE
    ]
    assert len(resolve_spans) == 1, "exactly one resolve span expected"
    resolve_span = resolve_spans[0]
    assert resolve_span["parent_span_id"] == query_span["span_id"]
    assert resolve_span["description"] == "resolving Query.error"
    assert resolve_span["data"] == ApproxDict(
        {
            "graphql.field_name": "error",
            "graphql.parent_type": "Query",
            "graphql.field_path": "Query.error",
            "graphql.path": "error",
        }
    )


@parameterize_strawberry_test
def test_capture_transaction_on_success(
    request,
    sentry_init,
    capture_events,
    client_factory,
    async_execution,
    framework_integrations,
):
    sentry_init(
        integrations=[
            StrawberryIntegration(async_execution=async_execution),
        ]
        + framework_integrations,
        traces_sample_rate=1,
    )
    events = capture_events()

    schema = strawberry.Schema(Query)

    client_factory = request.getfixturevalue(client_factory)
    client = client_factory(schema)

    query = "query GreetingQuery { hello }"
    client.post("/graphql", json={"query": query, "operationName": "GreetingQuery"})

    assert len(events) == 1
    (transaction_event,) = events

    assert transaction_event["transaction"] == "GreetingQuery"
    assert transaction_event["contexts"]["trace"]["op"] == OP.GRAPHQL_QUERY
    assert transaction_event["spans"]

    query_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_QUERY
    ]
    assert len(query_spans) == 1, "exactly one query span expected"
    query_span = query_spans[0]
    assert query_span["description"] == "query GreetingQuery"
    assert query_span["data"]["graphql.operation.type"] == "query"
    assert query_span["data"]["graphql.operation.name"] == "GreetingQuery"
    assert query_span["data"]["graphql.document"] == query
    assert query_span["data"]["graphql.resource_name"]

    parse_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_PARSE
    ]
    assert len(parse_spans) == 1, "exactly one parse span expected"
    parse_span = parse_spans[0]
    assert parse_span["parent_span_id"] == query_span["span_id"]
    assert parse_span["description"] == "parsing"

    validate_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_VALIDATE
    ]
    assert len(validate_spans) == 1, "exactly one validate span expected"
    validate_span = validate_spans[0]
    assert validate_span["parent_span_id"] == query_span["span_id"]
    assert validate_span["description"] == "validation"

    resolve_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_RESOLVE
    ]
    assert len(resolve_spans) == 1, "exactly one resolve span expected"
    resolve_span = resolve_spans[0]
    assert resolve_span["parent_span_id"] == query_span["span_id"]
    assert resolve_span["description"] == "resolving Query.hello"
    assert resolve_span["data"] == ApproxDict(
        {
            "graphql.field_name": "hello",
            "graphql.parent_type": "Query",
            "graphql.field_path": "Query.hello",
            "graphql.path": "hello",
        }
    )


@parameterize_strawberry_test
def test_transaction_no_operation_name(
    request,
    sentry_init,
    capture_events,
    client_factory,
    async_execution,
    framework_integrations,
):
    sentry_init(
        integrations=[
            StrawberryIntegration(async_execution=async_execution),
        ]
        + framework_integrations,
        traces_sample_rate=1,
    )
    events = capture_events()

    schema = strawberry.Schema(Query)

    client_factory = request.getfixturevalue(client_factory)
    client = client_factory(schema)

    query = "{ hello }"
    client.post("/graphql", json={"query": query})

    assert len(events) == 1
    (transaction_event,) = events

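    # Without an operation name there is nothing GraphQL-specific to name the
    # transaction after, so it falls back to the framework's route/view name.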
    if async_execution:
        assert transaction_event["transaction"] == "/graphql"
    else:
        assert transaction_event["transaction"] == "graphql_view"

    assert transaction_event["spans"]

    query_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_QUERY
    ]
    assert len(query_spans) == 1, "exactly one query span expected"
    query_span = query_spans[0]
    assert query_span["description"] == "query"
    assert query_span["data"]["graphql.operation.type"] == "query"
    assert query_span["data"]["graphql.operation.name"] is None
    assert query_span["data"]["graphql.document"] == query
    assert query_span["data"]["graphql.resource_name"]

    parse_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_PARSE
    ]
    assert len(parse_spans) == 1, "exactly one parse span expected"
    parse_span = parse_spans[0]
    assert parse_span["parent_span_id"] == query_span["span_id"]
    assert parse_span["description"] == "parsing"

    validate_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_VALIDATE
    ]
    assert len(validate_spans) == 1, "exactly one validate span expected"
    validate_span = validate_spans[0]
    assert validate_span["parent_span_id"] == query_span["span_id"]
    assert validate_span["description"] == "validation"

    resolve_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_RESOLVE
    ]
    assert len(resolve_spans) == 1, "exactly one resolve span expected"
    resolve_span = resolve_spans[0]
    assert resolve_span["parent_span_id"] == query_span["span_id"]
    assert resolve_span["description"] == "resolving Query.hello"
    assert resolve_span["data"] == ApproxDict(
        {
            "graphql.field_name": "hello",
            "graphql.parent_type": "Query",
            "graphql.field_path": "Query.hello",
            "graphql.path": "hello",
        }
    )


@parameterize_strawberry_test
def test_transaction_mutation(
    request,
    sentry_init,
    capture_events,
    client_factory,
    async_execution,
    framework_integrations,
):
    sentry_init(
        integrations=[
            StrawberryIntegration(async_execution=async_execution),
        ]
        + framework_integrations,
        traces_sample_rate=1,
    )
    events = capture_events()

    schema = strawberry.Schema(Query, mutation=Mutation)

    client_factory = request.getfixturevalue(client_factory)
    client = client_factory(schema)

    query = 'mutation Change { change(attribute: "something") }'
    client.post("/graphql", json={"query": query})

    assert len(events) == 1
    (transaction_event,) = events

    assert transaction_event["transaction"] == "Change"
    assert transaction_event["contexts"]["trace"]["op"] == OP.GRAPHQL_MUTATION
    assert transaction_event["spans"]

    query_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_MUTATION
    ]
    assert len(query_spans) == 1, "exactly one mutation span expected"
    query_span = query_spans[0]
    assert query_span["description"] == "mutation"
    assert query_span["data"]["graphql.operation.type"] == "mutation"
    assert query_span["data"]["graphql.operation.name"] is None
    assert query_span["data"]["graphql.document"] == query
    assert query_span["data"]["graphql.resource_name"]

    parse_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_PARSE
    ]
    assert len(parse_spans) == 1, "exactly one parse span expected"
    parse_span = parse_spans[0]
    assert parse_span["parent_span_id"] == query_span["span_id"]
    assert parse_span["description"] == "parsing"

    validate_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_VALIDATE
    ]
    assert len(validate_spans) == 1, "exactly one validate span expected"
    validate_span = validate_spans[0]
    assert validate_span["parent_span_id"] == query_span["span_id"]
    assert validate_span["description"] == "validation"

    resolve_spans = [
        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_RESOLVE
    ]
    assert len(resolve_spans) == 1, "exactly one resolve span expected"
    resolve_span = resolve_spans[0]
    assert resolve_span["parent_span_id"] == query_span["span_id"]
    assert resolve_span["description"] == "resolving Mutation.change"
    assert resolve_span["data"] == ApproxDict(
        {
            "graphql.field_name": "change",
            "graphql.parent_type": "Mutation",
            "graphql.field_path": "Mutation.change",
            "graphql.path": "change",
        }
    )


@parameterize_strawberry_test
def test_handle_none_query_gracefully(
    request,
    sentry_init,
    capture_events,
    client_factory,
    async_execution,
    framework_integrations,
):
    sentry_init(
        integrations=[
            StrawberryIntegration(async_execution=async_execution),
        ]
        + framework_integrations,
    )
    events = capture_events()

    schema = strawberry.Schema(Query)

    client_factory = request.getfixturevalue(client_factory)
    client = client_factory(schema)

    client.post("/graphql", json={})

    assert len(events) == 0, "expected no events to be sent to Sentry"


@parameterize_strawberry_test
def test_span_origin(
    request,
    sentry_init,
    capture_events,
    client_factory,
    async_execution,
    framework_integrations,
):
    """
    Tests for OP.GRAPHQL_MUTATION, OP.GRAPHQL_PARSE, OP.GRAPHQL_VALIDATE, OP.GRAPHQL_RESOLVE,
    """
    sentry_init(
        integrations=[
            StrawberryIntegration(async_execution=async_execution),
        ]
        + framework_integrations,
        traces_sample_rate=1,
    )
    events = capture_events()

    schema = strawberry.Schema(Query, mutation=Mutation)

    client_factory = request.getfixturevalue(client_factory)
    client = client_factory(schema)

    query = 'mutation Change { change(attribute: "something") }'
    client.post("/graphql", json={"query": query})

    (event,) = events

    is_flask = "Flask" in str(framework_integrations[0])
    if is_flask:
        assert event["contexts"]["trace"]["origin"] == "auto.http.flask"
    else:
        assert event["contexts"]["trace"]["origin"] == "auto.http.starlette"

    for span in event["spans"]:
        if span["op"].startswith("graphql."):
            assert span["origin"] == "auto.graphql.strawberry"


@parameterize_strawberry_test
def test_span_origin2(
    request,
    sentry_init,
    capture_events,
    client_factory,
    async_execution,
    framework_integrations,
):
    """
    Tests for OP.GRAPHQL_QUERY
    """
    sentry_init(
        integrations=[
            StrawberryIntegration(async_execution=async_execution),
        ]
        + framework_integrations,
        traces_sample_rate=1,
    )
    events = capture_events()

    schema = strawberry.Schema(Query, mutation=Mutation)

    client_factory = request.getfixturevalue(client_factory)
    client = client_factory(schema)

    query = "query GreetingQuery { hello }"
    client.post("/graphql", json={"query": query, "operationName": "GreetingQuery"})

    (event,) = events

    is_flask = "Flask" in str(framework_integrations[0])
    if is_flask:
        assert event["contexts"]["trace"]["origin"] == "auto.http.flask"
    else:
        assert event["contexts"]["trace"]["origin"] == "auto.http.starlette"

    for span in event["spans"]:
        if span["op"].startswith("graphql."):
            assert span["origin"] == "auto.graphql.strawberry"


@parameterize_strawberry_test
def test_span_origin3(
    request,
    sentry_init,
    capture_events,
    client_factory,
    async_execution,
    framework_integrations,
):
    """
    Tests for OP.GRAPHQL_SUBSCRIPTION
    """
    sentry_init(
        integrations=[
            StrawberryIntegration(async_execution=async_execution),
        ]
        + framework_integrations,
        traces_sample_rate=1,
    )
    events = capture_events()

    schema = strawberry.Schema(Query, subscription=Subscription)

    client_factory = request.getfixturevalue(client_factory)
    client = client_factory(schema)

    query = "subscription { messageAdded { content } }"
    client.post("/graphql", json={"query": query})

    (event,) = events

    is_flask = "Flask" in str(framework_integrations[0])
    if is_flask:
        assert event["contexts"]["trace"]["origin"] == "auto.http.flask"
    else:
        assert event["contexts"]["trace"]["origin"] == "auto.http.starlette"

    for span in event["spans"]:
        if span["op"].startswith("graphql."):
            assert span["origin"] == "auto.graphql.strawberry"

sentry-python-2.18.0/tests/integrations/sys_exit/test_sys_exit.py
import sys

import pytest

from sentry_sdk.integrations.sys_exit import SysExitIntegration


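# The matrix below covers the default behavior and both explicit settings of
# `capture_successful_exits`: by default only "unsuccessful" exits -- a non-zero
# integer or a string passed to sys.exit() -- produce an event, while
# `capture_successful_exits=True` captures every exit.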
@pytest.mark.parametrize(
    ("integration_params", "exit_status", "should_capture"),
    (
        ({}, 0, False),
        ({}, 1, True),
        ({}, None, False),
        ({}, "unsuccessful exit", True),
        ({"capture_successful_exits": False}, 0, False),
        ({"capture_successful_exits": False}, 1, True),
        ({"capture_successful_exits": False}, None, False),
        ({"capture_successful_exits": False}, "unsuccessful exit", True),
        ({"capture_successful_exits": True}, 0, True),
        ({"capture_successful_exits": True}, 1, True),
        ({"capture_successful_exits": True}, None, True),
        ({"capture_successful_exits": True}, "unsuccessful exit", True),
    ),
)
def test_sys_exit(
    sentry_init, capture_events, integration_params, exit_status, should_capture
):
    sentry_init(integrations=[SysExitIntegration(**integration_params)])

    events = capture_events()

    # Catch the sys.exit manually rather than with pytest.raises, because some
    # IDEs do not recognize that pytest.raises will catch SystemExit.
    try:
        sys.exit(exit_status)
    except SystemExit:
        ...
    else:
        pytest.fail("Patched sys.exit did not raise SystemExit")

    if should_capture:
        (event,) = events
        (exception_value,) = event["exception"]["values"]

        assert exception_value["type"] == "SystemExit"
        assert exception_value["value"] == (
            str(exit_status) if exit_status is not None else ""
        )
    else:
        assert len(events) == 0


def test_sys_exit_integration_not_auto_enabled(sentry_init, capture_events):
    sentry_init()  # No SysExitIntegration

    events = capture_events()

    # Catch the sys.exit manually rather than with pytest.raises, because some
    # IDEs do not recognize that pytest.raises will catch SystemExit.
    try:
        sys.exit(1)
    except SystemExit:
        ...
    else:
        pytest.fail(
            "sys.exit should not be patched, but it must have been because it did not raise SystemExit"
        )

    assert (
        len(events) == 0
    ), "No events should have been captured because sys.exit should not have been patched"

sentry-python-2.18.0/tests/integrations/test_gnu_backtrace.py
import pytest

from sentry_sdk import capture_exception
from sentry_sdk.integrations.gnu_backtrace import GnuBacktraceIntegration

LINES = r"""
0. clickhouse-server(StackTrace::StackTrace()+0x16) [0x99d31a6]
1. clickhouse-server(DB::Exception::Exception(std::__cxx11::basic_string, std::allocator > const&, int)+0x22) [0x372c822]
10. clickhouse-server(DB::ActionsVisitor::visit(std::shared_ptr const&)+0x1a12) [0x6ae45d2]
10. clickhouse-server(DB::InterpreterSelectQuery::executeImpl(DB::InterpreterSelectQuery::Pipeline&, std::shared_ptr const&, bool)+0x11af) [0x75c68ff]
10. clickhouse-server(ThreadPoolImpl::worker(std::_List_iterator)+0x1ab) [0x6f90c1b]
11. clickhouse-server() [0xae06ddf]
11. clickhouse-server(DB::ExpressionAnalyzer::getRootActions(std::shared_ptr const&, bool, std::shared_ptr&, bool)+0xdb) [0x6a0a63b]
11. clickhouse-server(DB::InterpreterSelectQuery::InterpreterSelectQuery(std::shared_ptr const&, DB::Context const&, std::shared_ptr const&, std::shared_ptr const&, std::vector, std::allocator >, std::allocator, std::allocator > > > const&, DB::QueryProcessingStage::Enum, unsigned long, bool)+0x5e6) [0x75c7516]
12. /lib/x86_64-linux-gnu/libpthread.so.0(+0x8184) [0x7f3bbc568184]
12. clickhouse-server(DB::ExpressionAnalyzer::getConstActions()+0xc9) [0x6a0b059]
12. clickhouse-server(DB::InterpreterSelectQuery::InterpreterSelectQuery(std::shared_ptr const&, DB::Context const&, std::vector, std::allocator >, std::allocator, std::allocator > > > const&, DB::QueryProcessingStage::Enum, unsigned long, bool)+0x56) [0x75c8276]
13. /lib/x86_64-linux-gnu/libc.so.6(clone+0x6d) [0x7f3bbbb8303d]
13. clickhouse-server(DB::InterpreterSelectWithUnionQuery::InterpreterSelectWithUnionQuery(std::shared_ptr const&, DB::Context const&, std::vector, std::allocator >, std::allocator, std::allocator > > > const&, DB::QueryProcessingStage::Enum, unsigned long, bool)+0x7e7) [0x75d4067]
13. clickhouse-server(DB::evaluateConstantExpression(std::shared_ptr const&, DB::Context const&)+0x3ed) [0x656bfdd]
14. clickhouse-server(DB::InterpreterFactory::get(std::shared_ptr&, DB::Context&, DB::QueryProcessingStage::Enum)+0x3a8) [0x75b0298]
14. clickhouse-server(DB::makeExplicitSet(DB::ASTFunction const*, DB::Block const&, bool, DB::Context const&, DB::SizeLimits const&, std::unordered_map, DB::PreparedSetKey::Hash, std::equal_to, std::allocator > > >&)+0x382) [0x6adf692]
15. clickhouse-server() [0x7664c79]
15. clickhouse-server(DB::ActionsVisitor::makeSet(DB::ASTFunction const*, DB::Block const&)+0x2a7) [0x6ae2227]
16. clickhouse-server(DB::ActionsVisitor::visit(std::shared_ptr const&)+0x1973) [0x6ae4533]
16. clickhouse-server(DB::executeQuery(std::__cxx11::basic_string, std::allocator > const&, DB::Context&, bool, DB::QueryProcessingStage::Enum)+0x8a) [0x76669fa]
17. clickhouse-server(DB::ActionsVisitor::visit(std::shared_ptr const&)+0x1324) [0x6ae3ee4]
17. clickhouse-server(DB::TCPHandler::runImpl()+0x4b9) [0x30973c9]
18. clickhouse-server(DB::ExpressionAnalyzer::getRootActions(std::shared_ptr const&, bool, std::shared_ptr&, bool)+0xdb) [0x6a0a63b]
18. clickhouse-server(DB::TCPHandler::run()+0x2b) [0x30985ab]
19. clickhouse-server(DB::ExpressionAnalyzer::appendGroupBy(DB::ExpressionActionsChain&, bool)+0x100) [0x6a0b4f0]
19. clickhouse-server(Poco::Net::TCPServerConnection::start()+0xf) [0x9b53e4f]
2. clickhouse-server(DB::FunctionTuple::getReturnTypeImpl(std::vector, std::allocator > > const&) const+0x122) [0x3a2a0f2]
2. clickhouse-server(DB::readException(DB::Exception&, DB::ReadBuffer&, std::__cxx11::basic_string, std::allocator > const&)+0x21f) [0x6fb253f]
2. clickhouse-server(void DB::readDateTimeTextFallback(long&, DB::ReadBuffer&, DateLUTImpl const&)+0x318) [0x99ffed8]
20. clickhouse-server(DB::InterpreterSelectQuery::analyzeExpressions(DB::QueryProcessingStage::Enum, bool)+0x364) [0x6437fa4]
20. clickhouse-server(Poco::Net::TCPServerDispatcher::run()+0x16a) [0x9b5422a]
21. clickhouse-server(DB::InterpreterSelectQuery::executeImpl(DB::InterpreterSelectQuery::Pipeline&, std::shared_ptr const&, bool)+0x36d) [0x643c28d]
21. clickhouse-server(Poco::PooledThread::run()+0x77) [0x9c70f37]
22. clickhouse-server(DB::InterpreterSelectQuery::executeWithMultipleStreams()+0x50) [0x643ecd0]
22. clickhouse-server(Poco::ThreadImpl::runnableEntry(void*)+0x38) [0x9c6caa8]
23. clickhouse-server() [0xa3c68cf]
23. clickhouse-server(DB::InterpreterSelectWithUnionQuery::executeWithMultipleStreams()+0x6c) [0x644805c]
24. /lib/x86_64-linux-gnu/libpthread.so.0(+0x8184) [0x7fe839d2d184]
24. clickhouse-server(DB::InterpreterSelectWithUnionQuery::execute()+0x38) [0x6448658]
25. /lib/x86_64-linux-gnu/libc.so.6(clone+0x6d) [0x7fe83934803d]
25. clickhouse-server() [0x65744ef]
26. clickhouse-server(DB::executeQuery(std::__cxx11::basic_string, std::allocator > const&, DB::Context&, bool, DB::QueryProcessingStage::Enum, bool)+0x81) [0x6576141]
27. clickhouse-server(DB::TCPHandler::runImpl()+0x752) [0x3739f82]
28. clickhouse-server(DB::TCPHandler::run()+0x2b) [0x373a5cb]
29. clickhouse-server(Poco::Net::TCPServerConnection::start()+0xf) [0x708e63f]
3. clickhouse-server(DB::Connection::receiveException()+0x81) [0x67d3ad1]
3. clickhouse-server(DB::DefaultFunctionBuilder::getReturnTypeImpl(std::vector > const&) const+0x223) [0x38ac3b3]
3. clickhouse-server(DB::FunctionComparison::executeDateOrDateTimeOrEnumOrUUIDWithConstString(DB::Block&, unsigned long, DB::IColumn const*, DB::IColumn const*, std::shared_ptr const&, std::shared_ptr const&, bool, unsigned long)+0xbb3) [0x411dee3]
30. clickhouse-server(Poco::Net::TCPServerDispatcher::run()+0xe9) [0x708ed79]
31. clickhouse-server(Poco::PooledThread::run()+0x81) [0x7142011]
4. clickhouse-server(DB::Connection::receivePacket()+0x767) [0x67d9cd7]
4. clickhouse-server(DB::FunctionBuilderImpl::getReturnTypeWithoutLowCardinality(std::vector > const&) const+0x75) [0x6869635]
4. clickhouse-server(DB::FunctionComparison::executeImpl(DB::Block&, std::vector > const&, unsigned long, unsigned long)+0x576) [0x41ab006]
5. clickhouse-server(DB::FunctionBuilderImpl::getReturnType(std::vector > const&) const+0x350) [0x6869f10]
5. clickhouse-server(DB::MultiplexedConnections::receivePacket()+0x7e) [0x67e7ede]
5. clickhouse-server(DB::PreparedFunctionImpl::execute(DB::Block&, std::vector > const&, unsigned long, unsigned long)+0x3e2) [0x7933492]
6. clickhouse-server(DB::ExpressionAction::execute(DB::Block&, std::unordered_map, std::allocator >, unsigned long, std::hash, std::allocator > >, std::equal_to, std::allocator > >, std::allocator, std::allocator > const, unsigned long> > >&) const+0x61a) [0x7ae093a]
6. clickhouse-server(DB::FunctionBuilderImpl::build(std::vector > const&) const+0x3c) [0x38accfc]
6. clickhouse-server(DB::RemoteBlockInputStream::readImpl()+0x87) [0x631da97]
7. clickhouse-server(DB::ExpressionActions::addImpl(DB::ExpressionAction, std::vector, std::allocator >, std::allocator, std::allocator > > >&)+0x552) [0x6a00052]
7. clickhouse-server(DB::ExpressionActions::execute(DB::Block&) const+0xe6) [0x7ae1e06]
7. clickhouse-server(DB::IBlockInputStream::read()+0x178) [0x63075e8]
8. clickhouse-server(DB::ExpressionActions::add(DB::ExpressionAction const&, std::vector, std::allocator >, std::allocator, std::allocator > > >&)+0x42) [0x6a00422]
8. clickhouse-server(DB::FilterBlockInputStream::FilterBlockInputStream(std::shared_ptr const&, std::shared_ptr const&, std::__cxx11::basic_string, std::allocator > const&, bool)+0x711) [0x79970d1]
8. clickhouse-server(DB::ParallelInputsProcessor::thread(std::shared_ptr, unsigned long)+0x2f1) [0x64467c1]
9. clickhouse-server() [0x75bd5a3]
9. clickhouse-server(DB::ScopeStack::addAction(DB::ExpressionAction const&)+0xd2) [0x6ae04d2]
9. clickhouse-server(ThreadFromGlobalPool::ThreadFromGlobalPool::process()::{lambda()#1}>(DB::ParallelInputsProcessor::process()::{lambda()#1}&&)::{lambda()#1}::operator()() const+0x6d) [0x644722d]
"""


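# Each line in LINES is a sample GNU backtrace frame as emitted by
# clickhouse-server, in the form "<index>. <binary>(<function>+<offset>) [<address>]";
# the parametrized test below feeds the frames to the integration one at a time.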
@pytest.mark.parametrize("input", LINES.strip().splitlines())
def test_basic(sentry_init, capture_events, input):
    sentry_init(integrations=[GnuBacktraceIntegration()])
    events = capture_events()

    try:
        raise ValueError(input)
    except ValueError:
        capture_exception()

    (event,) = events
    (exception,) = event["exception"]["values"]

    assert (
        exception["value"]
        == "<stripped GNU backtrace>"
    )
    (frame,) = exception["stacktrace"]["frames"][1:]

    if frame.get("function") is None:
        assert "clickhouse-server()" in input or "pthread" in input
    else:
        assert ")" not in frame["function"] and "(" not in frame["function"]
        assert frame["function"] in input

sentry-python-2.18.0/tests/integrations/threading/test_threading.py
import gc
from concurrent import futures
from threading import Thread

import pytest

import sentry_sdk
from sentry_sdk import capture_message
from sentry_sdk.integrations.threading import ThreadingIntegration

original_start = Thread.start
original_run = Thread.run


@pytest.mark.parametrize("integrations", [[ThreadingIntegration()], []])
def test_handles_exceptions(sentry_init, capture_events, integrations):
    sentry_init(default_integrations=False, integrations=integrations)
    events = capture_events()

    def crash():
        1 / 0

    t = Thread(target=crash)
    t.start()
    t.join()

    if integrations:
        (event,) = events

        (exception,) = event["exception"]["values"]
        assert exception["type"] == "ZeroDivisionError"
        assert exception["mechanism"]["type"] == "threading"
        assert not exception["mechanism"]["handled"]
    else:
        assert not events


@pytest.mark.parametrize("propagate_hub", (True, False))
def test_propagates_hub(sentry_init, capture_events, propagate_hub):
    sentry_init(
        default_integrations=False,
        integrations=[ThreadingIntegration(propagate_hub=propagate_hub)],
    )
    events = capture_events()

    def stage1():
        sentry_sdk.get_isolation_scope().set_tag("stage1", "true")

        t = Thread(target=stage2)
        t.start()
        t.join()

    def stage2():
        1 / 0

    t = Thread(target=stage1)
    t.start()
    t.join()

    (event,) = events

    (exception,) = event["exception"]["values"]

    assert exception["type"] == "ZeroDivisionError"
    assert exception["mechanism"]["type"] == "threading"
    assert not exception["mechanism"]["handled"]

    if propagate_hub:
        assert event["tags"]["stage1"] == "true"
    else:
        assert "stage1" not in event.get("tags", {})


@pytest.mark.parametrize("propagate_hub", (True, False))
def test_propagates_threadpool_hub(sentry_init, capture_events, propagate_hub):
    sentry_init(
        traces_sample_rate=1.0,
        integrations=[ThreadingIntegration(propagate_hub=propagate_hub)],
    )
    events = capture_events()

    def double(number):
        with sentry_sdk.start_span(op="task", name=str(number)):
            return number * 2

    with sentry_sdk.start_transaction(name="test_handles_threadpool"):
        with futures.ThreadPoolExecutor(max_workers=1) as executor:
            tasks = [executor.submit(double, number) for number in [1, 2, 3, 4]]
            for future in futures.as_completed(tasks):
                print("Getting future value!", future.result())

    sentry_sdk.flush()

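    # With hub propagation enabled, all four task spans share the transaction's
    # trace; with it disabled, the worker threads have no active transaction and
    # record no spans at all.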
    if propagate_hub:
        assert len(events) == 1
        (event,) = events
        assert event["spans"][0]["trace_id"] == event["spans"][1]["trace_id"]
        assert event["spans"][1]["trace_id"] == event["spans"][2]["trace_id"]
        assert event["spans"][2]["trace_id"] == event["spans"][3]["trace_id"]
        assert event["spans"][3]["trace_id"] == event["spans"][0]["trace_id"]
    else:
        (event,) = events
        assert len(event["spans"]) == 0


@pytest.mark.skip(reason="Temporarily disable to release SDK 2.0a1.")
def test_circular_references(sentry_init, request):
    sentry_init(default_integrations=False, integrations=[ThreadingIntegration()])

    gc.collect()
    gc.disable()
    request.addfinalizer(gc.enable)

    class MyThread(Thread):
        def run(self):
            pass

    t = MyThread()
    t.start()
    t.join()
    del t

    unreachable_objects = gc.collect()
    assert unreachable_objects == 0


def test_double_patching(sentry_init, capture_events):
    sentry_init(default_integrations=False, integrations=[ThreadingIntegration()])
    events = capture_events()

    # XXX: Workaround for race condition in the py library's magic import
    # system (py is a dependency of pytest)
    capture_message("hi")
    del events[:]

    class MyThread(Thread):
        def run(self):
            1 / 0

    ts = []
    for _ in range(10):
        t = MyThread()
        t.start()
        ts.append(t)

    for t in ts:
        t.join()

    assert len(events) == 10
    for event in events:
        (exception,) = event["exception"]["values"]
        assert exception["type"] == "ZeroDivisionError"


def test_wrapper_attributes(sentry_init):
    sentry_init(default_integrations=False, integrations=[ThreadingIntegration()])

    def target():
        assert t.run.__name__ == "run"
        assert t.run.__qualname__ == original_run.__qualname__

    t = Thread(target=target)
    t.start()
    t.join()

    assert Thread.start.__name__ == "start"
    assert Thread.start.__qualname__ == original_start.__qualname__
    assert t.start.__name__ == "start"
    assert t.start.__qualname__ == original_start.__qualname__

    assert Thread.run.__name__ == "run"
    assert Thread.run.__qualname__ == original_run.__qualname__
    assert t.run.__name__ == "run"
    assert t.run.__qualname__ == original_run.__qualname__

sentry-python-2.18.0/tests/integrations/tornado/__init__.py
import pytest

pytest.importorskip("tornado")

sentry-python-2.18.0/tests/integrations/tornado/test_tornado.py
import json

import pytest

import sentry_sdk
from sentry_sdk import start_transaction, capture_message
from sentry_sdk.integrations.tornado import TornadoIntegration

from tornado.web import RequestHandler, Application, HTTPError
from tornado.testing import AsyncHTTPTestCase


@pytest.fixture
def tornado_testcase(request):
    # Take the unittest class provided by tornado and manually call its setUp
    # and tearDown.
    #
    # The pytest plugins for tornado seem too complicated to use here, as they
    # assume the tests themselves are written as async code.
    def inner(app):
        class TestBogus(AsyncHTTPTestCase):
            def get_app(self):
                return app

            def bogustest(self):
                # We need to pass a valid test method name to the ctor, so this
                # is the method. It does nothing.
                pass

        self = TestBogus("bogustest")
        self.setUp()
        request.addfinalizer(self.tearDown)
        return self

    return inner


class CrashingHandler(RequestHandler):
    def get(self):
        sentry_sdk.get_isolation_scope().set_tag("foo", "42")
        1 / 0

    def post(self):
        sentry_sdk.get_isolation_scope().set_tag("foo", "43")
        1 / 0


class CrashingWithMessageHandler(RequestHandler):
    def get(self):
        capture_message("hi")
        1 / 0


class HelloHandler(RequestHandler):
    async def get(self):
        sentry_sdk.get_isolation_scope().set_tag("foo", "42")

        return b"hello"

    async def post(self):
        sentry_sdk.get_isolation_scope().set_tag("foo", "43")

        return b"hello"


def test_basic(tornado_testcase, sentry_init, capture_events):
    sentry_init(integrations=[TornadoIntegration()], send_default_pii=True)
    events = capture_events()
    client = tornado_testcase(Application([(r"/hi", CrashingHandler)]))

    response = client.fetch(
        "/hi?foo=bar", headers={"Cookie": "name=value; name2=value2; name3=value3"}
    )
    assert response.code == 500

    (event,) = events
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    assert exception["mechanism"]["type"] == "tornado"

    request = event["request"]
    host = request["headers"]["Host"]
    assert event["request"] == {
        "env": {"REMOTE_ADDR": "127.0.0.1"},
        "headers": {
            "Accept-Encoding": "gzip",
            "Connection": "close",
            "Cookie": "name=value; name2=value2; name3=value3",
            **request["headers"],
        },
        "cookies": {"name": "value", "name2": "value2", "name3": "value3"},
        "method": "GET",
        "query_string": "foo=bar",
        "url": "http://{host}/hi".format(host=host),
    }

    assert event["tags"] == {"foo": "42"}
    assert (
        event["transaction"]
        == "tests.integrations.tornado.test_tornado.CrashingHandler.get"
    )
    assert event["transaction_info"] == {"source": "component"}

    assert not sentry_sdk.get_isolation_scope()._tags


@pytest.mark.parametrize(
    "handler,code",
    [
        (CrashingHandler, 500),
        (HelloHandler, 200),
    ],
)
def test_transactions(tornado_testcase, sentry_init, capture_events, handler, code):
    sentry_init(integrations=[TornadoIntegration()], traces_sample_rate=1.0)
    events = capture_events()
    client = tornado_testcase(Application([(r"/hi", handler)]))

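    # Start a local "client" transaction and propagate its trace headers with the
    # request, so the server-side transaction continues the same trace.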
    with start_transaction(name="client") as span:
        pass

    response = client.fetch(
        "/hi", method="POST", body=b"heyoo", headers=dict(span.iter_headers())
    )
    assert response.code == code

    if code == 200:
        client_tx, server_tx = events
        server_error = None
    else:
        client_tx, server_error, server_tx = events

    assert client_tx["type"] == "transaction"
    assert client_tx["transaction"] == "client"
    assert client_tx["transaction_info"] == {
        "source": "custom"
    }  # because this is just the start_transaction() above.

    if server_error is not None:
        assert server_error["exception"]["values"][0]["type"] == "ZeroDivisionError"
        assert (
            server_error["transaction"]
            == "tests.integrations.tornado.test_tornado.CrashingHandler.post"
        )
        assert server_error["transaction_info"] == {"source": "component"}

    if code == 200:
        assert (
            server_tx["transaction"]
            == "tests.integrations.tornado.test_tornado.HelloHandler.post"
        )
    else:
        assert (
            server_tx["transaction"]
            == "tests.integrations.tornado.test_tornado.CrashingHandler.post"
        )

    assert server_tx["transaction_info"] == {"source": "component"}
    assert server_tx["type"] == "transaction"

    request = server_tx["request"]
    host = request["headers"]["Host"]
    assert server_tx["request"] == {
        "env": {"REMOTE_ADDR": "127.0.0.1"},
        "headers": {
            "Accept-Encoding": "gzip",
            "Connection": "close",
            **request["headers"],
        },
        "method": "POST",
        "query_string": "",
        "data": {"heyoo": [""]},
        "url": "http://{host}/hi".format(host=host),
    }

    assert (
        client_tx["contexts"]["trace"]["trace_id"]
        == server_tx["contexts"]["trace"]["trace_id"]
    )

    if server_error is not None:
        assert (
            server_error["contexts"]["trace"]["trace_id"]
            == server_tx["contexts"]["trace"]["trace_id"]
        )


def test_400_not_logged(tornado_testcase, sentry_init, capture_events):
    sentry_init(integrations=[TornadoIntegration()])
    events = capture_events()

    class CrashingHandler(RequestHandler):
        def get(self):
            raise HTTPError(400, "Oops")

    client = tornado_testcase(Application([(r"/", CrashingHandler)]))

    response = client.fetch("/")
    assert response.code == 400

    assert not events


def test_user_auth(tornado_testcase, sentry_init, capture_events):
    sentry_init(integrations=[TornadoIntegration()], send_default_pii=True)
    events = capture_events()

    class UserHandler(RequestHandler):
        def get(self):
            1 / 0

        def get_current_user(self):
            return 42

    class NoUserHandler(RequestHandler):
        def get(self):
            1 / 0

    client = tornado_testcase(
        Application([(r"/auth", UserHandler), (r"/noauth", NoUserHandler)])
    )

    # has user
    response = client.fetch("/auth")
    assert response.code == 500

    (event,) = events
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"

    assert event["user"] == {"is_authenticated": True}

    events.clear()

    # has no user
    response = client.fetch("/noauth")
    assert response.code == 500

    (event,) = events
    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"

    assert "user" not in event


def test_formdata(tornado_testcase, sentry_init, capture_events):
    sentry_init(integrations=[TornadoIntegration()], send_default_pii=True)
    events = capture_events()

    class FormdataHandler(RequestHandler):
        def post(self):
            raise ValueError(json.dumps(sorted(self.request.body_arguments)))

    client = tornado_testcase(Application([(r"/form", FormdataHandler)]))

    response = client.fetch(
        "/form?queryarg=1",
        method="POST",
        headers={"Content-Type": "application/x-www-form-urlencoded"},
        body=b"field1=value1&field2=value2",
    )

    assert response.code == 500

    (event,) = events
    (exception,) = event["exception"]["values"]
    assert exception["value"] == '["field1", "field2"]'
    assert event["request"]["data"] == {"field1": ["value1"], "field2": ["value2"]}


def test_json(tornado_testcase, sentry_init, capture_events):
    sentry_init(integrations=[TornadoIntegration()], send_default_pii=True)
    events = capture_events()

    class FormdataHandler(RequestHandler):
        def post(self):
            raise ValueError(json.dumps(sorted(self.request.body_arguments)))

    client = tornado_testcase(Application([(r"/form", FormdataHandler)]))

    response = client.fetch(
        "/form?queryarg=1",
        method="POST",
        headers={"Content-Type": "application/json"},
        body=b"""
        {"foo": {"bar": 42}}
        """,
    )

    assert response.code == 500

    (event,) = events
    (exception,) = event["exception"]["values"]
    assert exception["value"] == "[]"
    assert event
    assert event["request"]["data"] == {"foo": {"bar": 42}}


def test_error_has_new_trace_context_performance_enabled(
    tornado_testcase, sentry_init, capture_events
):
    """
    Check if an 'trace' context is added to errros and transactions when performance monitoring is enabled.
    """
    sentry_init(
        integrations=[TornadoIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    client = tornado_testcase(Application([(r"/hi", CrashingWithMessageHandler)]))
    client.fetch("/hi")

    (msg_event, error_event, transaction_event) = events

    assert "trace" in msg_event["contexts"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert "trace" in error_event["contexts"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert "trace" in transaction_event["contexts"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
        == transaction_event["contexts"]["trace"]["trace_id"]
    )


def test_error_has_new_trace_context_performance_disabled(
    tornado_testcase, sentry_init, capture_events
):
    """
    Check if an 'trace' context is added to errros and transactions when performance monitoring is disabled.
    """
    sentry_init(
        integrations=[TornadoIntegration()],
        traces_sample_rate=None,  # this is the default, just added for clarity
    )
    events = capture_events()

    client = tornado_testcase(Application([(r"/hi", CrashingWithMessageHandler)]))
    client.fetch("/hi")

    (msg_event, error_event) = events

    assert "trace" in msg_event["contexts"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert "trace" in error_event["contexts"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
    )


def test_error_has_existing_trace_context_performance_enabled(
    tornado_testcase, sentry_init, capture_events
):
    """
    Check if an 'trace' context is added to errros and transactions
    from the incoming 'sentry-trace' header when performance monitoring is enabled.
    """
    sentry_init(
        integrations=[TornadoIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    trace_id = "471a43a4192642f0b136d5159a501701"
    parent_span_id = "6e8f22c393e68f19"
    parent_sampled = 1
    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)

    headers = {"sentry-trace": sentry_trace_header}

    client = tornado_testcase(Application([(r"/hi", CrashingWithMessageHandler)]))
    client.fetch("/hi", headers=headers)

    (msg_event, error_event, transaction_event) = events

    assert "trace" in msg_event["contexts"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert "trace" in error_event["contexts"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert "trace" in transaction_event["contexts"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
        == transaction_event["contexts"]["trace"]["trace_id"]
        == "471a43a4192642f0b136d5159a501701"
    )


def test_error_has_existing_trace_context_performance_disabled(
    tornado_testcase, sentry_init, capture_events
):
    """
    Check if an 'trace' context is added to errros and transactions
    from the incoming 'sentry-trace' header when performance monitoring is disabled.
    """
    sentry_init(
        integrations=[TornadoIntegration()],
        traces_sample_rate=None,  # this is the default, just added for clarity
    )
    events = capture_events()

    trace_id = "471a43a4192642f0b136d5159a501701"
    parent_span_id = "6e8f22c393e68f19"
    parent_sampled = 1
    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)

    headers = {"sentry-trace": sentry_trace_header}

    client = tornado_testcase(Application([(r"/hi", CrashingWithMessageHandler)]))
    client.fetch("/hi", headers=headers)

    (msg_event, error_event) = events

    assert "trace" in msg_event["contexts"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert "trace" in error_event["contexts"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
        == "471a43a4192642f0b136d5159a501701"
    )


def test_span_origin(tornado_testcase, sentry_init, capture_events):
    sentry_init(integrations=[TornadoIntegration()], traces_sample_rate=1.0)
    events = capture_events()
    client = tornado_testcase(Application([(r"/hi", CrashingHandler)]))

    client.fetch(
        "/hi?foo=bar", headers={"Cookie": "name=value; name2=value2; name3=value3"}
    )

    (_, event) = events

    assert event["contexts"]["trace"]["origin"] == "auto.http.tornado"

sentry-python-2.18.0/tests/integrations/trytond/__init__.py
import pytest

pytest.importorskip("trytond")

sentry-python-2.18.0/tests/integrations/trytond/test_trytond.py
import json
import unittest.mock

import pytest

import trytond
from trytond.exceptions import TrytonException as TrytondBaseException
from trytond.exceptions import UserError as TrytondUserError
from trytond.exceptions import UserWarning as TrytondUserWarning
from trytond.exceptions import LoginException
from trytond.wsgi import app as trytond_app

from werkzeug.test import Client

from sentry_sdk.integrations.trytond import TrytondWSGIIntegration
from tests.conftest import unpack_werkzeug_response


@pytest.fixture(scope="function")
def app(sentry_init):
    yield trytond_app


@pytest.fixture
def get_client(app):
    def inner():
        return Client(app)

    return inner


@pytest.mark.parametrize(
    "exception", [Exception("foo"), type("FooException", (Exception,), {})("bar")]
)
def test_exceptions_captured(
    sentry_init, app, capture_exceptions, get_client, exception
):
    sentry_init(integrations=[TrytondWSGIIntegration()])
    exceptions = capture_exceptions()

    unittest.mock.sentinel.exception = exception

    @app.route("/exception")
    def _(request):
        raise unittest.mock.sentinel.exception

    client = get_client()
    _ = client.get("/exception")

    (e,) = exceptions
    assert e is exception


@pytest.mark.parametrize(
    "exception",
    [
        TrytondUserError("title"),
        TrytondUserWarning("title", "details"),
        LoginException("title", "details"),
    ],
)
def test_trytonderrors_not_captured(
    sentry_init, app, capture_exceptions, get_client, exception
):
    sentry_init(integrations=[TrytondWSGIIntegration()])
    exceptions = capture_exceptions()

    unittest.mock.sentinel.exception = exception

    @app.route("/usererror")
    def _(request):
        raise unittest.mock.sentinel.exception

    client = get_client()
    _ = client.get("/usererror")

    assert not exceptions


@pytest.mark.skipif(
    # Compare numeric version tuples; comparing lists of strings would misorder
    # versions (e.g. "10" < "5" lexicographically).
    tuple(int(part) for part in trytond.__version__.split(".")[:2]) < (5, 4),
    reason="At least Trytond-5.4 required",
)
def test_rpc_error_page(sentry_init, app, get_client):
    """Test that, after initializing the Trytond-SentrySDK integration
    a custom error handler can be registered to the Trytond WSGI app so as to
    inform the event identifiers to the Tryton RPC client"""

    sentry_init(integrations=[TrytondWSGIIntegration()])

    @app.route("/rpcerror", methods=["POST"])
    def _(request):
        raise Exception("foo")

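    # Convert any non-Tryton exception into a TrytondUserError so the Tryton RPC
    # client receives a well-formed JSON-RPC error payload instead of a bare 500.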
    @app.error_handler
    def _(app, request, e):
        if isinstance(e, TrytondBaseException):
            return
        else:
            data = TrytondUserError("Sentry error.", str(e))
            return app.make_response(request, data)

    client = get_client()

    # This would look like a natural Tryton RPC call
    _data = dict(
        id=42,  # request sequence
        method="class.method",  # rpc call
        params=[
            [1234],  # ids
            ["bar", "baz"],  # values
            dict(  # context
                client="12345678-9abc-def0-1234-56789abc",
                groups=[1],
                language="ca",
                language_direction="ltr",
            ),
        ],
    )
    response = client.post(
        "/rpcerror", content_type="application/json", data=json.dumps(_data)
    )

    (content, status, headers) = unpack_werkzeug_response(response)
    data = json.loads(content)
    assert status == "200 OK"
    assert headers.get("Content-Type") == "application/json"
    assert data == dict(id=42, error=["UserError", ["Sentry error.", "foo", None]])


def test_span_origin(sentry_init, app, capture_events, get_client):
    sentry_init(
        integrations=[TrytondWSGIIntegration()],
        traces_sample_rate=1.0,
    )
    events = capture_events()

    @app.route("/something")
    def _(request):
        return "ok"

    client = get_client()
    client.get("/something")

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "auto.http.trytond_wsgi"

sentry-python-2.18.0/tests/integrations/wsgi/test_wsgi.py
from collections import Counter
from unittest import mock

import pytest
from werkzeug.test import Client

import sentry_sdk
from sentry_sdk import capture_message
from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware


@pytest.fixture
def crashing_app():
    def app(environ, start_response):
        1 / 0

    return app


class IterableApp:
    def __init__(self, iterable):
        self.iterable = iterable

    def __call__(self, environ, start_response):
        return self.iterable


class ExitingIterable:
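    # Simulates a WSGI response iterable that raises the exception produced by
    # `exc_func` as soon as iteration starts -- e.g. an app whose response body
    # triggers SystemExit or KeyboardInterrupt mid-stream.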
    def __init__(self, exc_func):
        self._exc_func = exc_func

    def __iter__(self):
        return self

    def __next__(self):
        raise self._exc_func()

    def next(self):
        return type(self).__next__(self)


def test_basic(sentry_init, crashing_app, capture_events):
    sentry_init(send_default_pii=True)
    app = SentryWsgiMiddleware(crashing_app)
    client = Client(app)
    events = capture_events()

    with pytest.raises(ZeroDivisionError):
        client.get("/")

    (event,) = events

    assert event["transaction"] == "generic WSGI request"

    assert event["request"] == {
        "env": {"SERVER_NAME": "localhost", "SERVER_PORT": "80"},
        "headers": {"Host": "localhost"},
        "method": "GET",
        "query_string": "",
        "url": "http://localhost/",
    }


@pytest.mark.parametrize("path_info", ("bark/", "/bark/"))
@pytest.mark.parametrize("script_name", ("woof/woof", "woof/woof/"))
def test_script_name_is_respected(
    sentry_init, crashing_app, capture_events, script_name, path_info
):
    sentry_init(send_default_pii=True)
    app = SentryWsgiMiddleware(crashing_app)
    client = Client(app)
    events = capture_events()

    with pytest.raises(ZeroDivisionError):
        # setting url with PATH_INFO: bark/, HTTP_HOST: dogs.are.great and SCRIPT_NAME: woof/woof/
        client.get(path_info, f"https://dogs.are.great/{script_name}")  # noqa: E231

    (event,) = events

    assert event["request"]["url"] == "https://dogs.are.great/woof/woof/bark/"


@pytest.mark.parametrize("zero_code", [0, None])
def test_systemexit_zero_is_ignored(sentry_init, capture_events, zero_code):
    sentry_init(send_default_pii=True)
    iterable = ExitingIterable(lambda: SystemExit(zero_code))
    app = SentryWsgiMiddleware(IterableApp(iterable))
    client = Client(app)
    events = capture_events()

    with pytest.raises(SystemExit):
        client.get("/")

    assert len(events) == 0


@pytest.fixture(params=["", "foo", 1, 2])
def test_systemexit_nonzero_is_captured(sentry_init, capture_events, request):
    nonzero_code = request.param
    sentry_init(send_default_pii=True)
    iterable = ExitingIterable(lambda: SystemExit(nonzero_code))
    app = SentryWsgiMiddleware(IterableApp(iterable))
    client = Client(app)
    events = capture_events()

    with pytest.raises(SystemExit):
        client.get("/")

    (event,) = events

    assert "exception" in event
    exc = event["exception"]["values"][-1]
    assert exc["type"] == "SystemExit"
    assert exc["value"] == nonzero_code
    assert event["level"] == "error"


def test_keyboard_interrupt_is_captured(sentry_init, capture_events):
    sentry_init(send_default_pii=True)
    iterable = ExitingIterable(lambda: KeyboardInterrupt())
    app = SentryWsgiMiddleware(IterableApp(iterable))
    client = Client(app)
    events = capture_events()

    with pytest.raises(KeyboardInterrupt):
        client.get("/")

    (event,) = events

    assert "exception" in event
    exc = event["exception"]["values"][-1]
    assert exc["type"] == "KeyboardInterrupt"
    assert exc["value"] == ""
    assert event["level"] == "error"


def test_transaction_with_error(
    sentry_init, crashing_app, capture_events, DictionaryContaining  # noqa:N803
):
    def dogpark(environ, start_response):
        raise ValueError("Fetch aborted. The ball was not returned.")

    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
    app = SentryWsgiMiddleware(dogpark)
    client = Client(app)
    events = capture_events()

    with pytest.raises(ValueError):
        client.get("http://dogs.are.great/sit/stay/rollover/")

    error_event, envelope = events

    assert error_event["transaction"] == "generic WSGI request"
    assert error_event["contexts"]["trace"]["op"] == "http.server"
    assert error_event["exception"]["values"][0]["type"] == "ValueError"
    assert error_event["exception"]["values"][0]["mechanism"]["type"] == "wsgi"
    assert error_event["exception"]["values"][0]["mechanism"]["handled"] is False
    assert (
        error_event["exception"]["values"][0]["value"]
        == "Fetch aborted. The ball was not returned."
    )

    assert envelope["type"] == "transaction"

    # event trace context is a subset of envelope trace context
    assert envelope["contexts"]["trace"] == DictionaryContaining(
        error_event["contexts"]["trace"]
    )
    assert envelope["contexts"]["trace"]["status"] == "internal_error"
    assert envelope["transaction"] == error_event["transaction"]
    assert envelope["request"] == error_event["request"]


def test_transaction_no_error(
    sentry_init, capture_events, DictionaryContaining  # noqa:N803
):
    def dogpark(environ, start_response):
        start_response("200 OK", [])
        return ["Go get the ball! Good dog!"]

    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
    app = SentryWsgiMiddleware(dogpark)
    client = Client(app)
    events = capture_events()

    client.get("/dogs/are/great/")

    envelope = events[0]

    assert envelope["type"] == "transaction"
    assert envelope["transaction"] == "generic WSGI request"
    assert envelope["contexts"]["trace"]["op"] == "http.server"
    assert envelope["request"] == DictionaryContaining(
        {"method": "GET", "url": "http://localhost/dogs/are/great/"}
    )


def test_has_trace_if_performance_enabled(
    sentry_init,
    capture_events,
):
    def dogpark(environ, start_response):
        capture_message("Attempting to fetch the ball")
        raise ValueError("Fetch aborted. The ball was not returned.")

    sentry_init(traces_sample_rate=1.0)
    app = SentryWsgiMiddleware(dogpark)
    client = Client(app)
    events = capture_events()

    with pytest.raises(ValueError):
        client.get("http://dogs.are.great/sit/stay/rollover/")

    msg_event, error_event, transaction_event = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert transaction_event["contexts"]["trace"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert (
        msg_event["contexts"]["trace"]["trace_id"]
        == error_event["contexts"]["trace"]["trace_id"]
        == transaction_event["contexts"]["trace"]["trace_id"]
    )


def test_has_trace_if_performance_disabled(
    sentry_init,
    capture_events,
):
    def dogpark(environ, start_response):
        capture_message("Attempting to fetch the ball")
        raise ValueError("Fetch aborted. The ball was not returned.")

    sentry_init()
    app = SentryWsgiMiddleware(dogpark)
    client = Client(app)
    events = capture_events()

    with pytest.raises(ValueError):
        client.get("http://dogs.are.great/sit/stay/rollover/")

    msg_event, error_event = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]


def test_trace_from_headers_if_performance_enabled(
    sentry_init,
    capture_events,
):
    def dogpark(environ, start_response):
        capture_message("Attempting to fetch the ball")
        raise ValueError("Fetch aborted. The ball was not returned.")

    sentry_init(traces_sample_rate=1.0)
    app = SentryWsgiMiddleware(dogpark)
    client = Client(app)
    events = capture_events()

    trace_id = "582b43a4192642f0b136d5159a501701"
    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)

    with pytest.raises(ValueError):
        client.get(
            "http://dogs.are.great/sit/stay/rollover/",
            headers={"sentry-trace": sentry_trace_header},
        )

    msg_event, error_event, transaction_event = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]

    assert transaction_event["contexts"]["trace"]
    assert "trace_id" in transaction_event["contexts"]["trace"]

    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
    assert transaction_event["contexts"]["trace"]["trace_id"] == trace_id


def test_trace_from_headers_if_performance_disabled(
    sentry_init,
    capture_events,
):
    def dogpark(environ, start_response):
        capture_message("Attempting to fetch the ball")
        raise ValueError("Fetch aborted. The ball was not returned.")

    sentry_init()
    app = SentryWsgiMiddleware(dogpark)
    client = Client(app)
    events = capture_events()

    trace_id = "582b43a4192642f0b136d5159a501701"
    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)

    with pytest.raises(ValueError):
        client.get(
            "http://dogs.are.great/sit/stay/rollover/",
            headers={"sentry-trace": sentry_trace_header},
        )

    msg_event, error_event = events

    assert msg_event["contexts"]["trace"]
    assert "trace_id" in msg_event["contexts"]["trace"]
    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id

    assert error_event["contexts"]["trace"]
    assert "trace_id" in error_event["contexts"]["trace"]
    assert error_event["contexts"]["trace"]["trace_id"] == trace_id


def test_traces_sampler_gets_correct_values_in_sampling_context(
    sentry_init,
    DictionaryContaining,  # noqa:N803
):
    def app(environ, start_response):
        start_response("200 OK", [])
        return ["Go get the ball! Good dog!"]

    traces_sampler = mock.Mock(return_value=True)
    sentry_init(send_default_pii=True, traces_sampler=traces_sampler)
    app = SentryWsgiMiddleware(app)
    client = Client(app)

    client.get("/dogs/are/great/")

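    # The WSGI middleware exposes the raw environ to `traces_sampler` via the
    # "wsgi_environ" key of the sampling context.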
    traces_sampler.assert_any_call(
        DictionaryContaining(
            {
                "wsgi_environ": DictionaryContaining(
                    {
                        "PATH_INFO": "/dogs/are/great/",
                        "REQUEST_METHOD": "GET",
                    },
                ),
            }
        )
    )


def test_session_mode_defaults_to_request_mode_in_wsgi_handler(
    capture_envelopes, sentry_init
):
    """
    Test that ensures that even though the default `session_mode` for
    auto_session_tracking is `application`, that flips to `request` when we are
    in the WSGI handler
    """

    def app(environ, start_response):
        start_response("200 OK", [])
        return ["Go get the ball! Good dog!"]

    traces_sampler = mock.Mock(return_value=True)
    sentry_init(send_default_pii=True, traces_sampler=traces_sampler)
    app = SentryWsgiMiddleware(app)
    envelopes = capture_envelopes()

    client = Client(app)

    client.get("/dogs/are/great/")

    sentry_sdk.flush()

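    # envelopes[0] is the transaction envelope; the aggregated session
    # envelope is flushed after it.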
    sess = envelopes[1]
    assert len(sess.items) == 1
    sess_event = sess.items[0].payload.json

    aggregates = sess_event["aggregates"]
    assert len(aggregates) == 1
    assert aggregates[0]["exited"] == 1


def test_auto_session_tracking_with_aggregates(sentry_init, capture_envelopes):
    """
    Test for correct session aggregates in auto session tracking.
    """

    def sample_app(environ, start_response):
        if environ["REQUEST_URI"] != "/dogs/are/great/":
            1 / 0

        start_response("200 OK", [])
        return ["Go get the ball! Good dog!"]

    traces_sampler = mock.Mock(return_value=True)
    sentry_init(send_default_pii=True, traces_sampler=traces_sampler)
    app = SentryWsgiMiddleware(sample_app)
    envelopes = capture_envelopes()
    assert len(envelopes) == 0

    client = Client(app)
    client.get("/dogs/are/great/")
    client.get("/dogs/are/great/")
    try:
        client.get("/trigger/an/error/")
    except ZeroDivisionError:
        pass

    sentry_sdk.flush()

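    # Tally the leading item type of each envelope: three transactions (one
    # per request), one error event, and one aggregated sessions envelope.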
    count_item_types = Counter()
    for envelope in envelopes:
        count_item_types[envelope.items[0].type] += 1

    assert count_item_types["transaction"] == 3
    assert count_item_types["event"] == 1
    assert count_item_types["sessions"] == 1
    assert len(envelopes) == 5

    session_aggregates = envelopes[-1].items[0].payload.json["aggregates"]
    assert session_aggregates[0]["exited"] == 2
    assert session_aggregates[0]["crashed"] == 1
    assert len(session_aggregates) == 1


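# Remove the minimum-sample requirement so that even this near-instant request
# yields a sendable profile.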
@mock.patch("sentry_sdk.profiler.transaction_profiler.PROFILE_MINIMUM_SAMPLES", 0)
def test_profile_sent(
    sentry_init,
    capture_envelopes,
    teardown_profiling,
):
    def test_app(environ, start_response):
        start_response("200 OK", [])
        return ["Go get the ball! Good dog!"]

    sentry_init(
        traces_sample_rate=1.0,
        _experiments={"profiles_sample_rate": 1.0},
    )
    app = SentryWsgiMiddleware(test_app)
    envelopes = capture_envelopes()

    client = Client(app)
    client.get("/")

    envelopes = list(envelopes)
    assert len(envelopes) == 1

    profiles = [item for item in envelopes[0].items if item.type == "profile"]
    assert len(profiles) == 1


def test_span_origin_manual(sentry_init, capture_events):
    def dogpark(environ, start_response):
        start_response("200 OK", [])
        return ["Go get the ball! Good dog!"]

    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
    app = SentryWsgiMiddleware(dogpark)

    events = capture_events()

    client = Client(app)
    client.get("/dogs/are/great/")

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "manual"


def test_span_origin_custom(sentry_init, capture_events):
    def dogpark(environ, start_response):
        start_response("200 OK", [])
        return ["Go get the ball! Good dog!"]

    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
    app = SentryWsgiMiddleware(
        dogpark,
        span_origin="auto.dogpark.deluxe",
    )

    events = capture_events()

    client = Client(app)
    client.get("/dogs/are/great/")

    (event,) = events

    assert event["contexts"]["trace"]["origin"] == "auto.dogpark.deluxe"
sentry-python-2.18.0/tests/new_scopes_compat/000077500000000000000000000000001471214654000213235ustar00rootroot00000000000000sentry-python-2.18.0/tests/new_scopes_compat/__init__.py000066400000000000000000000004551471214654000234400ustar00rootroot00000000000000"""
Separate module for tests that check backwards compatibility of the Hub API with 1.x.
These tests should be removed once we remove the Hub API, likely in the next major.

All tests in this module are run with hub isolation, provided by the `isolate_hub` autouse
fixture defined in `conftest.py`.
"""
sentry-python-2.18.0/tests/new_scopes_compat/conftest.py000066400000000000000000000002351471214654000235220ustar00rootroot00000000000000import pytest
import sentry_sdk


@pytest.fixture(autouse=True)
def isolate_hub(suppress_deprecation_warnings):
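    """Run every test inside a `Hub(None)` block so hub state cannot leak between tests."""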
    with sentry_sdk.Hub(None):
        yield
sentry-python-2.18.0/tests/new_scopes_compat/test_new_scopes_compat.py000066400000000000000000000207561471214654000264560ustar00rootroot00000000000000import sentry_sdk
from sentry_sdk.hub import Hub

"""
These tests check the compatibility of the new scopes in SDK 2.0 with the old Hub/Scope system in SDK 1.x.

The tests have been run with the latest SDK 1.x version, and the data used in the `assert` statements represents
the behavior of SDK 1.x.

This makes sure that we are backwards compatible (on a best-effort basis; there will probably be some edge cases that are not covered here).
"""


def test_configure_scope_sdk1(sentry_init, capture_events):
    """
    Mutate data in a `with configure_scope` block.

    Checks the results of SDK 2.x against the results the same code returned in SDK 1.x.
    """
    sentry_init()

    events = capture_events()

    sentry_sdk.set_tag("A", 1)
    sentry_sdk.capture_message("Event A")

    with sentry_sdk.configure_scope() as scope:  # configure scope
        sentry_sdk.set_tag("B1", 1)
        scope.set_tag("B2", 1)
        sentry_sdk.capture_message("Event B")

    sentry_sdk.set_tag("Z", 1)
    sentry_sdk.capture_message("Event Z")

    (event_a, event_b, event_z) = events

    # Check against the results the same code returned in SDK 1.x
    assert event_a["tags"] == {"A": 1}
    assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1}
    assert event_z["tags"] == {"A": 1, "B1": 1, "B2": 1, "Z": 1}


def test_push_scope_sdk1(sentry_init, capture_events):
    """
    Mutate data in a `with push_scope` block

    Checks the results of SDK 2.x against the results the same code returned in SDK 1.x.
    """
    sentry_init()

    events = capture_events()

    sentry_sdk.set_tag("A", 1)
    sentry_sdk.capture_message("Event A")

    with sentry_sdk.push_scope() as scope:  # push scope
        sentry_sdk.set_tag("B1", 1)
        scope.set_tag("B2", 1)
        sentry_sdk.capture_message("Event B")

    sentry_sdk.set_tag("Z", 1)
    sentry_sdk.capture_message("Event Z")

    (event_a, event_b, event_z) = events

    # Check against the results the same code returned in SDK 1.x
    assert event_a["tags"] == {"A": 1}
    assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1}
    assert event_z["tags"] == {"A": 1, "Z": 1}


def test_with_hub_sdk1(sentry_init, capture_events):
    """
    Mutate data in a `with Hub:` block

    Checks the results of SDK 2.x against the results the same code returned in SDK 1.x.
    """
    sentry_init()

    events = capture_events()

    sentry_sdk.set_tag("A", 1)
    sentry_sdk.capture_message("Event A")

    with Hub.current as hub:  # with hub
        sentry_sdk.set_tag("B1", 1)
        hub.scope.set_tag("B2", 1)
        sentry_sdk.capture_message("Event B")

    sentry_sdk.set_tag("Z", 1)
    sentry_sdk.capture_message("Event Z")

    (event_a, event_b, event_z) = events

    # Check against the results the same code returned in SDK 1.x
    assert event_a["tags"] == {"A": 1}
    assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1}
    assert event_z["tags"] == {"A": 1, "B1": 1, "B2": 1, "Z": 1}


def test_with_hub_configure_scope_sdk1(sentry_init, capture_events):
    """
    Mutate data in a `with Hub:` containing a `with configure_scope` block

    Checks the results of SDK 2.x against the results the same code returned in SDK 1.x.
    """
    sentry_init()

    events = capture_events()

    sentry_sdk.set_tag("A", 1)
    sentry_sdk.capture_message("Event A")

    with Hub.current as hub:  # with hub
        sentry_sdk.set_tag("B1", 1)
        with hub.configure_scope() as scope:  # configure scope
            sentry_sdk.set_tag("B2", 1)
            hub.scope.set_tag("B3", 1)
            scope.set_tag("B4", 1)
            sentry_sdk.capture_message("Event B")
        sentry_sdk.set_tag("B5", 1)
        sentry_sdk.capture_message("Event C")

    sentry_sdk.set_tag("Z", 1)
    sentry_sdk.capture_message("Event Z")

    (event_a, event_b, event_c, event_z) = events

    # Check against the results the same code returned in SDK 1.x
    assert event_a["tags"] == {"A": 1}
    assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1}
    assert event_c["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1, "B5": 1}
    assert event_z["tags"] == {
        "A": 1,
        "B1": 1,
        "B2": 1,
        "B3": 1,
        "B4": 1,
        "B5": 1,
        "Z": 1,
    }


def test_with_hub_push_scope_sdk1(sentry_init, capture_events):
    """
    Mutate data in a `with Hub:` containing a `with push_scope` block

    Checks the results of SDK 2.x against the results the same code returned in SDK 1.x.
    """
    sentry_init()

    events = capture_events()

    sentry_sdk.set_tag("A", 1)
    sentry_sdk.capture_message("Event A")

    with Hub.current as hub:  # with hub
        sentry_sdk.set_tag("B1", 1)
        with hub.push_scope() as scope:  # push scope
            sentry_sdk.set_tag("B2", 1)
            hub.scope.set_tag("B3", 1)
            scope.set_tag("B4", 1)
            sentry_sdk.capture_message("Event B")
        sentry_sdk.set_tag("B5", 1)
        sentry_sdk.capture_message("Event C")

    sentry_sdk.set_tag("Z", 1)
    sentry_sdk.capture_message("Event Z")

    (event_a, event_b, event_c, event_z) = events

    # Check against the results the same code returned in SDK 1.x
    assert event_a["tags"] == {"A": 1}
    assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1}
    assert event_c["tags"] == {"A": 1, "B1": 1, "B5": 1}
    assert event_z["tags"] == {"A": 1, "B1": 1, "B5": 1, "Z": 1}


def test_with_cloned_hub_sdk1(sentry_init, capture_events):
    """
    Mutate data in a `with cloned Hub:` block

    Checks the results of SDK 2.x against the results the same code returned in SDK 1.x.
    """
    sentry_init()

    events = capture_events()

    sentry_sdk.set_tag("A", 1)
    sentry_sdk.capture_message("Event A")

    with Hub(Hub.current) as hub:  # clone hub
        sentry_sdk.set_tag("B1", 1)
        hub.scope.set_tag("B2", 1)
        sentry_sdk.capture_message("Event B")

    sentry_sdk.set_tag("Z", 1)
    sentry_sdk.capture_message("Event Z")

    (event_a, event_b, event_z) = events

    # Check against the results the same code returned in SDK 1.x
    assert event_a["tags"] == {"A": 1}
    assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1}
    assert event_z["tags"] == {"A": 1, "Z": 1}


def test_with_cloned_hub_configure_scope_sdk1(sentry_init, capture_events):
    """
    Mutate data in a `with cloned Hub:` containing a `with configure_scope` block

    Checks the results of SDK 2.x against the results the same code returned in SDK 1.x.
    """
    sentry_init()

    events = capture_events()

    sentry_sdk.set_tag("A", 1)
    sentry_sdk.capture_message("Event A")

    with Hub(Hub.current) as hub:  # clone hub
        sentry_sdk.set_tag("B1", 1)
        with hub.configure_scope() as scope:  # configure scope
            sentry_sdk.set_tag("B2", 1)
            hub.scope.set_tag("B3", 1)
            scope.set_tag("B4", 1)
            sentry_sdk.capture_message("Event B")
        sentry_sdk.set_tag("B5", 1)
        sentry_sdk.capture_message("Event C")

    sentry_sdk.set_tag("Z", 1)
    sentry_sdk.capture_message("Event Z")

    (event_a, event_b, event_c, event_z) = events

    # Check against the results the same code returned in SDK 1.x
    assert event_a["tags"] == {"A": 1}
    assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1}
    assert event_c["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1, "B5": 1}
    assert event_z["tags"] == {"A": 1, "Z": 1}


def test_with_cloned_hub_push_scope_sdk1(sentry_init, capture_events):
    """
    Mutate data in a `with cloned Hub:` containing a `with push_scope` block

    Checks the results of SDK 2.x against the results the same code returned in SDK 1.x.
    """
    sentry_init()

    events = capture_events()

    sentry_sdk.set_tag("A", 1)
    sentry_sdk.capture_message("Event A")

    with Hub(Hub.current) as hub:  # clone hub
        sentry_sdk.set_tag("B1", 1)
        with hub.push_scope() as scope:  # push scope
            sentry_sdk.set_tag("B2", 1)
            hub.scope.set_tag("B3", 1)
            scope.set_tag("B4", 1)
            sentry_sdk.capture_message("Event B")
        sentry_sdk.set_tag("B5", 1)
        sentry_sdk.capture_message("Event C")

    sentry_sdk.set_tag("Z", 1)
    sentry_sdk.capture_message("Event Z")

    (event_a, event_b, event_c, event_z) = events

    # Check against the results the same code returned in SDK 1.x
    assert event_a["tags"] == {"A": 1}
    assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1}
    assert event_c["tags"] == {"A": 1, "B1": 1, "B5": 1}
    assert event_z["tags"] == {"A": 1, "Z": 1}
sentry-python-2.18.0/tests/new_scopes_compat/test_new_scopes_compat_event.py000066400000000000000000000422511471214654000276510ustar00rootroot00000000000000import pytest

from unittest import mock

import sentry_sdk
from sentry_sdk.hub import Hub
from sentry_sdk.integrations import iter_default_integrations
from sentry_sdk.scrubber import EventScrubber, DEFAULT_DENYLIST


"""
These tests check the compatibility of the new scopes in SDK 2.0 with the old Hub/Scope system in SDK 1.x.

The tests have been run with the latest SDK 1.x version, and the data used in the `assert` statements represents
the behavior of SDK 1.x.

This makes sure that we are backwards compatible (on a best-effort basis; there will probably be some edge cases that are not covered here).
"""


@pytest.fixture
def integrations():
    return [
        integration.identifier
        for integration in iter_default_integrations(
            with_auto_enabling_integrations=False
        )
    ]


@pytest.fixture
def expected_error(integrations):
    def create_expected_error_event(trx, span):
        return {
            "level": "warning-X",
            "exception": {
                "values": [
                    {
                        "mechanism": {"type": "generic", "handled": True},
                        "module": None,
                        "type": "ValueError",
                        "value": "This is a test exception",
                        "stacktrace": {
                            "frames": [
                                {
                                    "filename": "tests/new_scopes_compat/test_new_scopes_compat_event.py",
                                    "abs_path": mock.ANY,
                                    "function": "_faulty_function",
                                    "module": "tests.new_scopes_compat.test_new_scopes_compat_event",
                                    "lineno": mock.ANY,
                                    "pre_context": [
                                        "    return create_expected_transaction_event",
                                        "",
                                        "",
                                        "def _faulty_function():",
                                        "    try:",
                                    ],
                                    "context_line": '        raise ValueError("This is a test exception")',
                                    "post_context": [
                                        "    except ValueError as ex:",
                                        "        sentry_sdk.capture_exception(ex)",
                                        "",
                                        "",
                                        "def _test_before_send(event, hint):",
                                    ],
                                    "vars": {
                                        "ex": mock.ANY,
                                    },
                                    "in_app": True,
                                }
                            ]
                        },
                    }
                ]
            },
            "event_id": mock.ANY,
            "timestamp": mock.ANY,
            "contexts": {
                "character": {
                    "name": "Mighty Fighter changed by before_send",
                    "age": 19,
                    "attack_type": "melee",
                },
                "trace": {
                    "trace_id": trx.trace_id,
                    "span_id": span.span_id,
                    "parent_span_id": span.parent_span_id,
                    "op": "test_span",
                    "origin": "manual",
                    "description": None,
                    "data": {
                        "thread.id": mock.ANY,
                        "thread.name": "MainThread",
                    },
                },
                "runtime": {
                    "name": "CPython",
                    "version": mock.ANY,
                    "build": mock.ANY,
                },
            },
            "user": {
                "id": "123",
                "email": "jane.doe@example.com",
                "ip_address": "[Filtered]",
            },
            "transaction": "test_transaction",
            "transaction_info": {"source": "custom"},
            "tags": {"tag1": "tag1_value", "tag2": "tag2_value"},
            "extra": {
                "extra1": "extra1_value",
                "extra2": "extra2_value",
                "should_be_removed_by_event_scrubber": "[Filtered]",
                "sys.argv": "[Filtered]",
            },
            "breadcrumbs": {
                "values": [
                    {
                        "category": "error-level",
                        "message": "Authenticated user %s",
                        "level": "error",
                        "data": {"breadcrumb2": "somedata"},
                        "timestamp": mock.ANY,
                        "type": "default",
                    }
                ]
            },
            "modules": mock.ANY,
            "release": "0.1.2rc3",
            "environment": "checking-compatibility-with-sdk1",
            "server_name": mock.ANY,
            "sdk": {
                "name": "sentry.python",
                "version": mock.ANY,
                "packages": [{"name": "pypi:sentry-sdk", "version": mock.ANY}],
                "integrations": integrations,
            },
            "platform": "python",
            "_meta": {
                "user": {"ip_address": {"": {"rem": [["!config", "s"]]}}},
                "extra": {
                    "should_be_removed_by_event_scrubber": {
                        "": {"rem": [["!config", "s"]]}
                    },
                    "sys.argv": {"": {"rem": [["!config", "s"]]}},
                },
            },
        }

    return create_expected_error_event


@pytest.fixture
def expected_transaction(integrations):
    def create_expected_transaction_event(trx, span):
        return {
            "type": "transaction",
            "transaction": "test_transaction changed by before_send_transaction",
            "transaction_info": {"source": "custom"},
            "contexts": {
                "trace": {
                    "trace_id": trx.trace_id,
                    "span_id": trx.span_id,
                    "parent_span_id": None,
                    "op": "test_transaction_op",
                    "origin": "manual",
                    "description": None,
                    "data": {
                        "thread.id": mock.ANY,
                        "thread.name": "MainThread",
                    },
                },
                "character": {
                    "name": "Mighty Fighter changed by before_send_transaction",
                    "age": 19,
                    "attack_type": "melee",
                },
                "runtime": {
                    "name": "CPython",
                    "version": mock.ANY,
                    "build": mock.ANY,
                },
            },
            "tags": {"tag1": "tag1_value", "tag2": "tag2_value"},
            "timestamp": mock.ANY,
            "start_timestamp": mock.ANY,
            "spans": [
                {
                    "data": {
                        "thread.id": mock.ANY,
                        "thread.name": "MainThread",
                    },
                    "trace_id": trx.trace_id,
                    "span_id": span.span_id,
                    "parent_span_id": span.parent_span_id,
                    "same_process_as_parent": True,
                    "op": "test_span",
                    "origin": "manual",
                    "description": None,
                    "start_timestamp": mock.ANY,
                    "timestamp": mock.ANY,
                }
            ],
            "measurements": {"memory_used": {"value": 456, "unit": "byte"}},
            "event_id": mock.ANY,
            "level": "warning-X",
            "user": {
                "id": "123",
                "email": "jane.doe@example.com",
                "ip_address": "[Filtered]",
            },
            "extra": {
                "extra1": "extra1_value",
                "extra2": "extra2_value",
                "should_be_removed_by_event_scrubber": "[Filtered]",
                "sys.argv": "[Filtered]",
            },
            "release": "0.1.2rc3",
            "environment": "checking-compatibility-with-sdk1",
            "server_name": mock.ANY,
            "sdk": {
                "name": "sentry.python",
                "version": mock.ANY,
                "packages": [{"name": "pypi:sentry-sdk", "version": mock.ANY}],
                "integrations": integrations,
            },
            "platform": "python",
            "_meta": {
                "user": {"ip_address": {"": {"rem": [["!config", "s"]]}}},
                "extra": {
                    "should_be_removed_by_event_scrubber": {
                        "": {"rem": [["!config", "s"]]}
                    },
                    "sys.argv": {"": {"rem": [["!config", "s"]]}},
                },
            },
        }

    return create_expected_transaction_event


def _faulty_function():
    try:
        raise ValueError("This is a test exception")
    except ValueError as ex:
        sentry_sdk.capture_exception(ex)


def _test_before_send(event, hint):
    event["contexts"]["character"]["name"] += " changed by before_send"
    return event


def _test_before_send_transaction(event, hint):
    event["transaction"] += " changed by before_send_transaction"
    event["contexts"]["character"]["name"] += " changed by before_send_transaction"
    return event


def _test_before_breadcrumb(breadcrumb, hint):
    if breadcrumb["category"] == "info-level":
        return None
    return breadcrumb


def _generate_event_data(scope=None):
    """
    Generates some data to be used in the events sent by the tests.
    """
    sentry_sdk.set_level("warning-X")

    sentry_sdk.add_breadcrumb(
        category="info-level",
        message="Authenticated user %s",
        level="info",
        data={"breadcrumb1": "somedata"},
    )
    sentry_sdk.add_breadcrumb(
        category="error-level",
        message="Authenticated user %s",
        level="error",
        data={"breadcrumb2": "somedata"},
    )

    sentry_sdk.set_context(
        "character",
        {
            "name": "Mighty Fighter",
            "age": 19,
            "attack_type": "melee",
        },
    )

    sentry_sdk.set_extra("extra1", "extra1_value")
    sentry_sdk.set_extra("extra2", "extra2_value")
    sentry_sdk.set_extra("should_be_removed_by_event_scrubber", "XXX")

    sentry_sdk.set_tag("tag1", "tag1_value")
    sentry_sdk.set_tag("tag2", "tag2_value")

    sentry_sdk.set_user(
        {"id": "123", "email": "jane.doe@example.com", "ip_address": "211.161.1.124"}
    )

    sentry_sdk.set_measurement("memory_used", 456, "byte")

    if scope is not None:
        scope.add_attachment(bytes=b"Hello World", filename="hello.txt")


def _init_sentry_sdk(sentry_init):
    sentry_init(
        environment="checking-compatibility-with-sdk1",
        release="0.1.2rc3",
        before_send=_test_before_send,
        before_send_transaction=_test_before_send_transaction,
        before_breadcrumb=_test_before_breadcrumb,
        event_scrubber=EventScrubber(
            denylist=DEFAULT_DENYLIST
            + ["should_be_removed_by_event_scrubber", "sys.argv"]
        ),
        send_default_pii=False,
        traces_sample_rate=1.0,
        auto_enabling_integrations=False,
    )


#
# The actual Tests start here!
#


def test_event(sentry_init, capture_envelopes, expected_error, expected_transaction):
    _init_sentry_sdk(sentry_init)

    envelopes = capture_envelopes()

    with sentry_sdk.start_transaction(
        name="test_transaction", op="test_transaction_op"
    ) as trx:
        with sentry_sdk.start_span(op="test_span") as span:
            with sentry_sdk.configure_scope() as scope:  # configure scope
                _generate_event_data(scope)
                _faulty_function()

    (error_envelope, transaction_envelope) = envelopes

    error = error_envelope.get_event()
    transaction = transaction_envelope.get_transaction_event()
    attachment = error_envelope.items[-1]

    assert error == expected_error(trx, span)
    assert transaction == expected_transaction(trx, span)
    assert attachment.headers == {
        "filename": "hello.txt",
        "type": "attachment",
        "content_type": "text/plain",
    }
    assert attachment.payload.bytes == b"Hello World"


def test_event2(sentry_init, capture_envelopes, expected_error, expected_transaction):
    _init_sentry_sdk(sentry_init)

    envelopes = capture_envelopes()

    with Hub(Hub.current):
        sentry_sdk.set_tag("A", 1)  # will not be added

    with Hub.current:  # with hub
        with sentry_sdk.push_scope() as scope:
            scope.set_tag("B", 1)  # will not be added

        with sentry_sdk.start_transaction(
            name="test_transaction", op="test_transaction_op"
        ) as trx:
            with sentry_sdk.start_span(op="test_span") as span:
                with sentry_sdk.configure_scope() as scope:  # configure scope
                    _generate_event_data(scope)
                    _faulty_function()

    (error_envelope, transaction_envelope) = envelopes

    error = error_envelope.get_event()
    transaction = transaction_envelope.get_transaction_event()
    attachment = error_envelope.items[-1]

    assert error == expected_error(trx, span)
    assert transaction == expected_transaction(trx, span)
    assert attachment.headers == {
        "filename": "hello.txt",
        "type": "attachment",
        "content_type": "text/plain",
    }
    assert attachment.payload.bytes == b"Hello World"


def test_event3(sentry_init, capture_envelopes, expected_error, expected_transaction):
    _init_sentry_sdk(sentry_init)

    envelopes = capture_envelopes()

    with Hub(Hub.current):
        sentry_sdk.set_tag("A", 1)  # will not be added

    with Hub.current:  # with hub
        with sentry_sdk.push_scope() as scope:
            scope.set_tag("B", 1)  # will not be added

        with sentry_sdk.push_scope() as scope:  # push scope
            with sentry_sdk.start_transaction(
                name="test_transaction", op="test_transaction_op"
            ) as trx:
                with sentry_sdk.start_span(op="test_span") as span:
                    _generate_event_data(scope)
                    _faulty_function()

    (error_envelope, transaction_envelope) = envelopes

    error = error_envelope.get_event()
    transaction = transaction_envelope.get_transaction_event()
    attachment = error_envelope.items[-1]

    assert error == expected_error(trx, span)
    assert transaction == expected_transaction(trx, span)
    assert attachment.headers == {
        "filename": "hello.txt",
        "type": "attachment",
        "content_type": "text/plain",
    }
    assert attachment.payload.bytes == b"Hello World"


def test_event4(sentry_init, capture_envelopes, expected_error, expected_transaction):
    _init_sentry_sdk(sentry_init)

    envelopes = capture_envelopes()

    with Hub(Hub.current):
        sentry_sdk.set_tag("A", 1)  # will not be added

    with Hub(Hub.current):  # with hub clone
        with sentry_sdk.push_scope() as scope:
            scope.set_tag("B", 1)  # will not be added

        with sentry_sdk.start_transaction(
            name="test_transaction", op="test_transaction_op"
        ) as trx:
            with sentry_sdk.start_span(op="test_span") as span:
                with sentry_sdk.configure_scope() as scope:  # configure scope
                    _generate_event_data(scope)
                    _faulty_function()

    (error_envelope, transaction_envelope) = envelopes

    error = error_envelope.get_event()
    transaction = transaction_envelope.get_transaction_event()
    attachment = error_envelope.items[-1]

    assert error == expected_error(trx, span)
    assert transaction == expected_transaction(trx, span)
    assert attachment.headers == {
        "filename": "hello.txt",
        "type": "attachment",
        "content_type": "text/plain",
    }
    assert attachment.payload.bytes == b"Hello World"


def test_event5(sentry_init, capture_envelopes, expected_error, expected_transaction):
    _init_sentry_sdk(sentry_init)

    envelopes = capture_envelopes()

    with Hub(Hub.current):
        sentry_sdk.set_tag("A", 1)  # will not be added

    with Hub(Hub.current):  # with hub clone
        with sentry_sdk.push_scope() as scope:
            scope.set_tag("B", 1)  # will not be added

        with sentry_sdk.push_scope() as scope:  # push scope
            with sentry_sdk.start_transaction(
                name="test_transaction", op="test_transaction_op"
            ) as trx:
                with sentry_sdk.start_span(op="test_span") as span:
                    _generate_event_data(scope)
                    _faulty_function()

    (error_envelope, transaction_envelope) = envelopes

    error = error_envelope.get_event()
    transaction = transaction_envelope.get_transaction_event()
    attachment = error_envelope.items[-1]

    assert error == expected_error(trx, span)
    assert transaction == expected_transaction(trx, span)
    assert attachment.headers == {
        "filename": "hello.txt",
        "type": "attachment",
        "content_type": "text/plain",
    }
    assert attachment.payload.bytes == b"Hello World"
sentry-python-2.18.0/tests/profiler/000077500000000000000000000000001471214654000174355ustar00rootroot00000000000000sentry-python-2.18.0/tests/profiler/__init__.py000066400000000000000000000000001471214654000215340ustar00rootroot00000000000000sentry-python-2.18.0/tests/profiler/test_continuous_profiler.py000066400000000000000000000160721471214654000251640ustar00rootroot00000000000000import threading
import time
from collections import defaultdict
from unittest import mock

import pytest

import sentry_sdk
from sentry_sdk.consts import VERSION
from sentry_sdk.profiler.continuous_profiler import (
    setup_continuous_profiler,
    start_profiler,
    stop_profiler,
)
from tests.conftest import ApproxDict

try:
    import gevent
except ImportError:
    gevent = None


requires_gevent = pytest.mark.skipif(gevent is None, reason="gevent not enabled")


def experimental_options(mode=None, auto_start=None):
    return {
        "_experiments": {
            "continuous_profiling_auto_start": auto_start,
            "continuous_profiling_mode": mode,
        }
    }


mock_sdk_info = {
    "name": "sentry.python",
    "version": VERSION,
    "packages": [{"name": "pypi:sentry-sdk", "version": VERSION}],
}


@pytest.mark.parametrize("mode", [pytest.param("foo")])
@pytest.mark.parametrize(
    "make_options",
    [pytest.param(experimental_options, id="experiment")],
)
def test_continuous_profiler_invalid_mode(mode, make_options, teardown_profiling):
    with pytest.raises(ValueError):
        setup_continuous_profiler(
            make_options(mode=mode),
            mock_sdk_info,
            lambda envelope: None,
        )


@pytest.mark.parametrize(
    "mode",
    [
        pytest.param("thread"),
        pytest.param("gevent", marks=requires_gevent),
    ],
)
@pytest.mark.parametrize(
    "make_options",
    [pytest.param(experimental_options, id="experiment")],
)
def test_continuous_profiler_valid_mode(mode, make_options, teardown_profiling):
    options = make_options(mode=mode)
    setup_continuous_profiler(
        options,
        mock_sdk_info,
        lambda envelope: None,
    )


@pytest.mark.parametrize(
    "mode",
    [
        pytest.param("thread"),
        pytest.param("gevent", marks=requires_gevent),
    ],
)
@pytest.mark.parametrize(
    "make_options",
    [pytest.param(experimental_options, id="experiment")],
)
def test_continuous_profiler_setup_twice(mode, make_options, teardown_profiling):
    options = make_options(mode=mode)
    # setting up the first time should return True to indicate success
    assert setup_continuous_profiler(
        options,
        mock_sdk_info,
        lambda envelope: None,
    )
    # setting up the second time should return False to indicate no-op
    assert not setup_continuous_profiler(
        options,
        mock_sdk_info,
        lambda envelope: None,
    )


def assert_single_transaction_with_profile_chunks(envelopes, thread):
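    """
    Assert that exactly one transaction was sent, that its trace context,
    profile context, and spans all reference the profiling thread and a common
    profiler_id, and that at least one profile chunk with that profiler_id was
    sent alongside it.
    """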
    items = defaultdict(list)
    for envelope in envelopes:
        for item in envelope.items:
            items[item.type].append(item)

    assert len(items["transaction"]) == 1
    assert len(items["profile_chunk"]) > 0

    transaction = items["transaction"][0].payload.json

    trace_context = transaction["contexts"]["trace"]

    assert trace_context == ApproxDict(
        {
            "data": ApproxDict(
                {
                    "thread.id": str(thread.ident),
                    "thread.name": thread.name,
                }
            ),
        }
    )

    profile_context = transaction["contexts"]["profile"]
    profiler_id = profile_context["profiler_id"]

    assert profile_context == ApproxDict({"profiler_id": profiler_id})

    spans = transaction["spans"]
    assert len(spans) > 0
    for span in spans:
        assert span["data"] == ApproxDict(
            {
                "profiler_id": profiler_id,
                "thread.id": str(thread.ident),
                "thread.name": thread.name,
            }
        )

    for profile_chunk_item in items["profile_chunk"]:
        profile_chunk = profile_chunk_item.payload.json
        assert profile_chunk == ApproxDict(
            {
                "client_sdk": {
                    "name": mock.ANY,
                    "version": VERSION,
                },
                "platform": "python",
                "profiler_id": profiler_id,
                "version": "2",
            }
        )


def assert_single_transaction_without_profile_chunks(envelopes):
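    """
    Assert that exactly one transaction and no profile chunks were sent, and
    that the transaction carries no profile context.
    """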
    items = defaultdict(list)
    for envelope in envelopes:
        for item in envelope.items:
            items[item.type].append(item)

    assert len(items["transaction"]) == 1
    assert len(items["profile_chunk"]) == 0

    transaction = items["transaction"][0].payload.json
    assert "profile" not in transaction["contexts"]


@pytest.mark.forked
@pytest.mark.parametrize(
    "mode",
    [
        pytest.param("thread"),
        pytest.param("gevent", marks=requires_gevent),
    ],
)
@pytest.mark.parametrize(
    "make_options",
    [pytest.param(experimental_options, id="experiment")],
)
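# Shrink the profile chunk buffer to 10ms so chunks are flushed within the
# short sleeps used below.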
@mock.patch("sentry_sdk.profiler.continuous_profiler.PROFILE_BUFFER_SECONDS", 0.01)
def test_continuous_profiler_auto_start_and_manual_stop(
    sentry_init,
    capture_envelopes,
    mode,
    make_options,
    teardown_profiling,
):
    options = make_options(mode=mode, auto_start=True)
    sentry_init(
        traces_sample_rate=1.0,
        _experiments=options.get("_experiments", {}),
    )

    envelopes = capture_envelopes()

    thread = threading.current_thread()

    with sentry_sdk.start_transaction(name="profiling"):
        with sentry_sdk.start_span(op="op"):
            time.sleep(0.05)

    assert_single_transaction_with_profile_chunks(envelopes, thread)

    for _ in range(3):
        stop_profiler()

        envelopes.clear()

        with sentry_sdk.start_transaction(name="profiling"):
            with sentry_sdk.start_span(op="op"):
                time.sleep(0.05)

        assert_single_transaction_without_profile_chunks(envelopes)

        start_profiler()

        envelopes.clear()

        with sentry_sdk.start_transaction(name="profiling"):
            with sentry_sdk.start_span(op="op"):
                time.sleep(0.05)

        assert_single_transaction_with_profile_chunks(envelopes, thread)


@pytest.mark.parametrize(
    "mode",
    [
        pytest.param("thread"),
        pytest.param("gevent", marks=requires_gevent),
    ],
)
@pytest.mark.parametrize(
    "make_options",
    [pytest.param(experimental_options, id="experiment")],
)
@mock.patch("sentry_sdk.profiler.continuous_profiler.PROFILE_BUFFER_SECONDS", 0.01)
def test_continuous_profiler_manual_start_and_stop(
    sentry_init,
    capture_envelopes,
    mode,
    make_options,
    teardown_profiling,
):
    options = make_options(mode=mode)
    sentry_init(
        traces_sample_rate=1.0,
        _experiments=options.get("_experiments", {}),
    )

    envelopes = capture_envelopes()

    thread = threading.current_thread()

    for _ in range(3):
        start_profiler()

        envelopes.clear()

        with sentry_sdk.start_transaction(name="profiling"):
            with sentry_sdk.start_span(op="op"):
                time.sleep(0.05)

        assert_single_transaction_with_profile_chunks(envelopes, thread)

        stop_profiler()

        envelopes.clear()

        with sentry_sdk.start_transaction(name="profiling"):
            with sentry_sdk.start_span(op="op"):
                time.sleep(0.05)

        assert_single_transaction_without_profile_chunks(envelopes)
sentry-python-2.18.0/tests/profiler/test_transaction_profiler.py000066400000000000000000000575361471214654000253150ustar00rootroot00000000000000import inspect
import os
import sentry_sdk
import sys
import threading
import time
import warnings
from collections import defaultdict
from unittest import mock

import pytest

from sentry_sdk import start_transaction
from sentry_sdk.profiler.transaction_profiler import (
    GeventScheduler,
    Profile,
    Scheduler,
    ThreadScheduler,
    setup_profiler,
)
from sentry_sdk.profiler.utils import (
    extract_frame,
    extract_stack,
    frame_id,
    get_frame_name,
)
from sentry_sdk._lru_cache import LRUCache

try:
    import gevent
except ImportError:
    gevent = None


requires_gevent = pytest.mark.skipif(gevent is None, reason="gevent not enabled")


def process_test_sample(sample):
    # insert a mock hashable for the stack
    return [(tid, (stack, stack)) for tid, stack in sample]


def non_experimental_options(mode=None, sample_rate=None):
    return {"profiler_mode": mode, "profiles_sample_rate": sample_rate}


def experimental_options(mode=None, sample_rate=None):
    return {
        "_experiments": {"profiler_mode": mode, "profiles_sample_rate": sample_rate}
    }


@pytest.mark.parametrize(
    "mode",
    [pytest.param("foo")],
)
@pytest.mark.parametrize(
    "make_options",
    [
        pytest.param(experimental_options, id="experiment"),
        pytest.param(non_experimental_options, id="non experimental"),
    ],
)
def test_profiler_invalid_mode(mode, make_options, teardown_profiling):
    with pytest.raises(ValueError):
        setup_profiler(make_options(mode))


@pytest.mark.parametrize(
    "mode",
    [
        pytest.param("thread"),
        pytest.param("sleep"),
        pytest.param("gevent", marks=requires_gevent),
    ],
)
@pytest.mark.parametrize(
    "make_options",
    [
        pytest.param(experimental_options, id="experiment"),
        pytest.param(non_experimental_options, id="non experimental"),
    ],
)
def test_profiler_valid_mode(mode, make_options, teardown_profiling):
    # should not raise any exceptions
    setup_profiler(make_options(mode))


@pytest.mark.parametrize(
    "make_options",
    [
        pytest.param(experimental_options, id="experiment"),
        pytest.param(non_experimental_options, id="non experimental"),
    ],
)
def test_profiler_setup_twice(make_options, teardown_profiling):
    # setting up the first time should return True to indicate success
    assert setup_profiler(make_options())
    # setting up the second time should return False to indicate no-op
    assert not setup_profiler(make_options())


@pytest.mark.parametrize(
    "mode",
    [
        pytest.param("thread"),
        pytest.param("gevent", marks=requires_gevent),
    ],
)
@pytest.mark.parametrize(
    ("profiles_sample_rate", "profile_count"),
    [
        pytest.param(1.00, 1, id="profiler sampled at 1.00"),
        pytest.param(0.75, 1, id="profiler sampled at 0.75"),
        pytest.param(0.25, 0, id="profiler sampled at 0.25"),
        pytest.param(0.00, 0, id="profiler sampled at 0.00"),
        pytest.param(None, 0, id="profiler not enabled"),
    ],
)
@pytest.mark.parametrize(
    "make_options",
    [
        pytest.param(experimental_options, id="experiment"),
        pytest.param(non_experimental_options, id="non experimental"),
    ],
)
@mock.patch("sentry_sdk.profiler.transaction_profiler.PROFILE_MINIMUM_SAMPLES", 0)
def test_profiles_sample_rate(
    sentry_init,
    capture_envelopes,
    capture_record_lost_event_calls,
    teardown_profiling,
    profiles_sample_rate,
    profile_count,
    make_options,
    mode,
):
    options = make_options(mode=mode, sample_rate=profiles_sample_rate)
    sentry_init(
        traces_sample_rate=1.0,
        profiler_mode=options.get("profiler_mode"),
        profiles_sample_rate=options.get("profiles_sample_rate"),
        _experiments=options.get("_experiments", {}),
    )

    envelopes = capture_envelopes()
    record_lost_event_calls = capture_record_lost_event_calls()

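    # Pin random.random() to 0.5 so that sample rates above 0.5 deterministically
    # keep the profile and lower rates deterministically drop it.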
    with mock.patch(
        "sentry_sdk.profiler.transaction_profiler.random.random", return_value=0.5
    ):
        with start_transaction(name="profiling"):
            pass

    items = defaultdict(list)
    for envelope in envelopes:
        for item in envelope.items:
            items[item.type].append(item)

    assert len(items["transaction"]) == 1
    assert len(items["profile"]) == profile_count
    if profiles_sample_rate is None or profiles_sample_rate == 0:
        assert record_lost_event_calls == []
    elif profile_count:
        assert record_lost_event_calls == []
    else:
        assert record_lost_event_calls == [("sample_rate", "profile", None, 1)]


@pytest.mark.parametrize(
    "mode",
    [
        pytest.param("thread"),
        pytest.param("gevent", marks=requires_gevent),
    ],
)
@pytest.mark.parametrize(
    ("profiles_sampler", "profile_count"),
    [
        pytest.param(lambda _: 1.00, 1, id="profiler sampled at 1.00"),
        pytest.param(lambda _: 0.75, 1, id="profiler sampled at 0.75"),
        pytest.param(lambda _: 0.25, 0, id="profiler sampled at 0.25"),
        pytest.param(lambda _: 0.00, 0, id="profiler sampled at 0.00"),
        pytest.param(lambda _: None, 0, id="profiler not enabled"),
        pytest.param(
            lambda ctx: 1 if ctx["transaction_context"]["name"] == "profiling" else 0,
            1,
            id="profiler sampled for transaction name",
        ),
        pytest.param(
            lambda ctx: 0 if ctx["transaction_context"]["name"] == "profiling" else 1,
            0,
            id="profiler not sampled for transaction name",
        ),
        pytest.param(
            lambda _: "1", 0, id="profiler not sampled because string sample rate"
        ),
        pytest.param(lambda _: True, 1, id="profiler sampled at True"),
        pytest.param(lambda _: False, 0, id="profiler sampled at False"),
    ],
)
@mock.patch("sentry_sdk.profiler.transaction_profiler.PROFILE_MINIMUM_SAMPLES", 0)
def test_profiles_sampler(
    sentry_init,
    capture_envelopes,
    capture_record_lost_event_calls,
    teardown_profiling,
    profiles_sampler,
    profile_count,
    mode,
):
    sentry_init(
        traces_sample_rate=1.0,
        profiles_sampler=profiles_sampler,
    )

    envelopes = capture_envelopes()
    record_lost_event_calls = capture_record_lost_event_calls()

    with mock.patch(
        "sentry_sdk.profiler.transaction_profiler.random.random", return_value=0.5
    ):
        with start_transaction(name="profiling"):
            pass

    items = defaultdict(list)
    for envelope in envelopes:
        for item in envelope.items:
            items[item.type].append(item)

    assert len(items["transaction"]) == 1
    assert len(items["profile"]) == profile_count
    if profile_count:
        assert record_lost_event_calls == []
    else:
        assert record_lost_event_calls == [("sample_rate", "profile", None, 1)]


def test_minimum_unique_samples_required(
    sentry_init,
    capture_envelopes,
    capture_record_lost_event_calls,
    teardown_profiling,
):
    sentry_init(
        traces_sample_rate=1.0,
        _experiments={"profiles_sample_rate": 1.0},
    )

    envelopes = capture_envelopes()
    record_lost_event_calls = capture_record_lost_event_calls()

    with start_transaction(name="profiling"):
        pass

    items = defaultdict(list)
    for envelope in envelopes:
        for item in envelope.items:
            items[item.type].append(item)

    assert len(items["transaction"]) == 1
    # because we don't leave any time for the profiler to
    # take any samples, the profile should not be sent
    assert len(items["profile"]) == 0
    assert record_lost_event_calls == [("insufficient_data", "profile", None, 1)]


@pytest.mark.forked
def test_profile_captured(
    sentry_init,
    capture_envelopes,
    teardown_profiling,
):
    sentry_init(
        traces_sample_rate=1.0,
        _experiments={"profiles_sample_rate": 1.0},
    )

    envelopes = capture_envelopes()

    with start_transaction(name="profiling"):
        time.sleep(0.05)

    items = defaultdict(list)
    for envelope in envelopes:
        for item in envelope.items:
            items[item.type].append(item)

    assert len(items["transaction"]) == 1
    assert len(items["profile"]) == 1


def get_frame(depth=1):
    """
    This function is not exactly true to its name. Depending on
    how it is called, the true depth of the stack can be deeper
    than the argument implies.
    """
    if depth <= 0:
        raise ValueError("only positive integers allowed")
    if depth > 1:
        return get_frame(depth=depth - 1)
    return inspect.currentframe()


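# Helper classes whose methods return their own frame, used below to check how
# get_frame_name() renders each kind of qualified name.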
class GetFrameBase:
    def inherited_instance_method(self):
        return inspect.currentframe()

    def inherited_instance_method_wrapped(self):
        def wrapped():
            return inspect.currentframe()

        return wrapped

    @classmethod
    def inherited_class_method(cls):
        return inspect.currentframe()

    @classmethod
    def inherited_class_method_wrapped(cls):
        def wrapped():
            return inspect.currentframe()

        return wrapped

    @staticmethod
    def inherited_static_method():
        return inspect.currentframe()


class GetFrame(GetFrameBase):
    def instance_method(self):
        return inspect.currentframe()

    def instance_method_wrapped(self):
        def wrapped():
            return inspect.currentframe()

        return wrapped

    @classmethod
    def class_method(cls):
        return inspect.currentframe()

    @classmethod
    def class_method_wrapped(cls):
        def wrapped():
            return inspect.currentframe()

        return wrapped

    @staticmethod
    def static_method():
        return inspect.currentframe()


@pytest.mark.parametrize(
    ("frame", "frame_name"),
    [
        pytest.param(
            get_frame(),
            "get_frame",
            id="function",
        ),
        pytest.param(
            (lambda: inspect.currentframe())(),
            "",
            id="lambda",
        ),
        pytest.param(
            GetFrame().instance_method(),
            "GetFrame.instance_method",
            id="instance_method",
        ),
        pytest.param(
            GetFrame().instance_method_wrapped()(),
            (
                "wrapped"
                if sys.version_info < (3, 11)
                else "GetFrame.instance_method_wrapped..wrapped"
            ),
            id="instance_method_wrapped",
        ),
        pytest.param(
            GetFrame().class_method(),
            "GetFrame.class_method",
            id="class_method",
        ),
        pytest.param(
            GetFrame().class_method_wrapped()(),
            (
                "wrapped"
                if sys.version_info < (3, 11)
                else "GetFrame.class_method_wrapped..wrapped"
            ),
            id="class_method_wrapped",
        ),
        pytest.param(
            GetFrame().static_method(),
            "static_method" if sys.version_info < (3, 11) else "GetFrame.static_method",
            id="static_method",
        ),
        pytest.param(
            GetFrame().inherited_instance_method(),
            "GetFrameBase.inherited_instance_method",
            id="inherited_instance_method",
        ),
        pytest.param(
            GetFrame().inherited_instance_method_wrapped()(),
            (
                "wrapped"
                if sys.version_info < (3, 11)
                else "GetFrameBase.inherited_instance_method_wrapped..wrapped"
            ),
            id="instance_method_wrapped",
        ),
        pytest.param(
            GetFrame().inherited_class_method(),
            "GetFrameBase.inherited_class_method",
            id="inherited_class_method",
        ),
        pytest.param(
            GetFrame().inherited_class_method_wrapped()(),
            (
                "wrapped"
                if sys.version_info < (3, 11)
                else "GetFrameBase.inherited_class_method_wrapped..wrapped"
            ),
            id="inherited_class_method_wrapped",
        ),
        pytest.param(
            GetFrame().inherited_static_method(),
            (
                "inherited_static_method"
                if sys.version_info < (3, 11)
                else "GetFrameBase.inherited_static_method"
            ),
            id="inherited_static_method",
        ),
    ],
)
def test_get_frame_name(frame, frame_name):
    assert get_frame_name(frame) == frame_name


@pytest.mark.parametrize(
    ("get_frame", "function"),
    [
        pytest.param(lambda: get_frame(depth=1), "get_frame", id="simple"),
    ],
)
def test_extract_frame(get_frame, function):
    cwd = os.getcwd()
    frame = get_frame()
    extracted_frame = extract_frame(frame_id(frame), frame, cwd)

    # the abs_path should be equal to the normalized path of the co_filename
    assert extracted_frame["abs_path"] == os.path.normpath(frame.f_code.co_filename)

    # the module should be pulled from this test module
    assert extracted_frame["module"] == __name__

    # the filename should be the path relative to the cwd
    assert extracted_frame["filename"] == __file__[len(cwd) + 1 :]

    assert extracted_frame["function"] == function

    # the lineno will shift over time as this file is modified so just check
    # that it is an int
    assert isinstance(extracted_frame["lineno"], int)


@pytest.mark.parametrize(
    ("depth", "max_stack_depth", "actual_depth"),
    [
        pytest.param(1, 128, 1, id="less than"),
        pytest.param(256, 128, 128, id="greater than"),
        pytest.param(128, 128, 128, id="equals"),
    ],
)
def test_extract_stack_with_max_depth(depth, max_stack_depth, actual_depth):
    # introduce a lambda that we'll be looking for in the stack
    frame = (lambda: get_frame(depth=depth))()

    # plus 1 because we intentionally introduced a lambda that we'll
    # look for in the final stack to make sure it's in the right position
    base_stack_depth = len(inspect.stack()) + 1

    # increase the max_stack_depth by the `base_stack_depth` to account
    # for the extra frames pytest will add
    _, frame_ids, frames = extract_stack(
        frame,
        LRUCache(max_size=1),
        max_stack_depth=max_stack_depth + base_stack_depth,
        cwd=os.getcwd(),
    )
    assert len(frame_ids) == base_stack_depth + actual_depth
    assert len(frames) == base_stack_depth + actual_depth

    for i in range(actual_depth):
        assert frames[i]["function"] == "get_frame", i

    # index 0 contains the innermost frame on the stack, so the lambda
    # should be at index `actual_depth`
    if sys.version_info >= (3, 11):
        assert (
            frames[actual_depth]["function"]
            == "test_extract_stack_with_max_depth.."
        ), actual_depth
    else:
        assert frames[actual_depth]["function"] == "<lambda>", actual_depth


@pytest.mark.parametrize(
    ("frame", "depth"),
    [(get_frame(depth=1), len(inspect.stack()))],
)
def test_extract_stack_with_cache(frame, depth):
    # make sure cache has enough room or this test will fail
    cache = LRUCache(max_size=depth)
    cwd = os.getcwd()
    _, _, frames1 = extract_stack(frame, cache, cwd=cwd)
    _, _, frames2 = extract_stack(frame, cache, cwd=cwd)

    assert len(frames1) > 0
    assert len(frames2) > 0
    assert len(frames1) == len(frames2)
    for i, (frame1, frame2) in enumerate(zip(frames1, frames2)):
        # DO NOT use `==` for the assertion here since we are
        # testing for identity, and using `==` would test for
        # equality, which would always pass since we're extracting
        # the same stack.
        assert frame1 is frame2, i


def get_scheduler_threads(scheduler):
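    # scheduler threads share the scheduler's name, so we can find them by
    # filtering the global thread list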
    return [thread for thread in threading.enumerate() if thread.name == scheduler.name]


@pytest.mark.parametrize(
    ("scheduler_class",),
    [
        pytest.param(ThreadScheduler, id="thread scheduler"),
        pytest.param(
            GeventScheduler,
            marks=[
                requires_gevent,
                pytest.mark.skip(
                    reason="cannot find this thread via threading.enumerate()"
                ),
            ],
            id="gevent scheduler",
        ),
    ],
)
def test_thread_scheduler_single_background_thread(scheduler_class):
    scheduler = scheduler_class(frequency=1000)

    # not yet setup, no scheduler threads yet
    assert len(get_scheduler_threads(scheduler)) == 0

    scheduler.setup()

    # setup but no profiles started so still no threads
    assert len(get_scheduler_threads(scheduler)) == 0

    scheduler.ensure_running()

    # the scheduler always starts exactly 1 thread
    assert len(get_scheduler_threads(scheduler)) == 1

    scheduler.ensure_running()

    # the scheduler still only has 1 thread
    assert len(get_scheduler_threads(scheduler)) == 1

    scheduler.teardown()

    # once finished, the thread should stop
    assert len(get_scheduler_threads(scheduler)) == 0


@pytest.mark.parametrize(
    ("scheduler_class",),
    [
        pytest.param(ThreadScheduler, id="thread scheduler"),
        pytest.param(
            GeventScheduler,
            marks=[
                requires_gevent,
                pytest.mark.skip(
                    reason="cannot find this thread via threading.enumerate()"
                ),
            ],
            id="gevent scheduler",
        ),
    ],
)
def test_thread_scheduler_no_thread_on_shutdown(scheduler_class):
    scheduler = scheduler_class(frequency=1000)

    # not yet setup, no scheduler threads yet
    assert len(get_scheduler_threads(scheduler)) == 0

    scheduler.setup()

    # setup but no profiles started so still no threads
    assert len(get_scheduler_threads(scheduler)) == 0

    # mock a RuntimeError as if the 3.12 interpreter were shutting down
    with mock.patch(
        "threading.Thread.start",
        side_effect=RuntimeError("can't create new thread at interpreter shutdown"),
    ):
        scheduler.ensure_running()

    assert scheduler.running is False

    # still no thread
    assert len(get_scheduler_threads(scheduler)) == 0

    scheduler.teardown()

    assert len(get_scheduler_threads(scheduler)) == 0


@pytest.mark.parametrize(
    ("scheduler_class",),
    [
        pytest.param(ThreadScheduler, id="thread scheduler"),
        pytest.param(GeventScheduler, marks=requires_gevent, id="gevent scheduler"),
    ],
)
@mock.patch("sentry_sdk.profiler.transaction_profiler.MAX_PROFILE_DURATION_NS", 1)
def test_max_profile_duration_reached(scheduler_class):
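    # MAX_PROFILE_DURATION_NS is mocked to 1 ns above, so a write at
    # start_ns + 2 falls past the maximum duration and stops the profile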
    sample = [
        (
            "1",
            extract_stack(
                get_frame(),
                LRUCache(max_size=1),
                cwd=os.getcwd(),
            ),
        ),
    ]

    with scheduler_class(frequency=1000) as scheduler:
        with Profile(True, 0, scheduler=scheduler) as profile:
            # profile just started, it's active
            assert profile.active

            # write a sample at the start time, so still active
            profile.write(profile.start_ns + 0, sample)
            assert profile.active

            # write a sample at max time, so still active
            profile.write(profile.start_ns + 1, sample)
            assert profile.active

            # write a sample PAST the max time, so now inactive
            profile.write(profile.start_ns + 2, sample)
            assert not profile.active


class NoopScheduler(Scheduler):
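    """A scheduler that never starts sampling, so tests can drive
    Profile.write() by hand."""
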
    def setup(self):
        # type: () -> None
        pass

    def teardown(self):
        # type: () -> None
        pass

    def ensure_running(self):
        # type: () -> None
        pass


current_thread = threading.current_thread()
thread_metadata = {
    str(current_thread.ident): {
        "name": str(current_thread.name),
    },
}


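# stacks extracted at two different depths; the processing tests below reuse
# them to exercise frame and stack deduplication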
sample_stacks = [
    extract_stack(
        get_frame(),
        LRUCache(max_size=1),
        max_stack_depth=1,
        cwd=os.getcwd(),
    ),
    extract_stack(
        get_frame(),
        LRUCache(max_size=1),
        max_stack_depth=2,
        cwd=os.getcwd(),
    ),
]


@pytest.mark.parametrize(
    ("samples", "expected"),
    [
        pytest.param(
            [],
            {
                "frames": [],
                "samples": [],
                "stacks": [],
                "thread_metadata": thread_metadata,
            },
            id="empty",
        ),
        pytest.param(
            [(6, [("1", sample_stacks[0])])],
            {
                "frames": [],
                "samples": [],
                "stacks": [],
                "thread_metadata": thread_metadata,
            },
            id="single sample out of range",
        ),
        pytest.param(
            [(0, [("1", sample_stacks[0])])],
            {
                "frames": [sample_stacks[0][2][0]],
                "samples": [
                    {
                        "elapsed_since_start_ns": "0",
                        "thread_id": "1",
                        "stack_id": 0,
                    },
                ],
                "stacks": [[0]],
                "thread_metadata": thread_metadata,
            },
            id="single sample in range",
        ),
        pytest.param(
            [
                (0, [("1", sample_stacks[0])]),
                (1, [("1", sample_stacks[0])]),
            ],
            {
                "frames": [sample_stacks[0][2][0]],
                "samples": [
                    {
                        "elapsed_since_start_ns": "0",
                        "thread_id": "1",
                        "stack_id": 0,
                    },
                    {
                        "elapsed_since_start_ns": "1",
                        "thread_id": "1",
                        "stack_id": 0,
                    },
                ],
                "stacks": [[0]],
                "thread_metadata": thread_metadata,
            },
            id="two identical stacks",
        ),
        pytest.param(
            [
                (0, [("1", sample_stacks[0])]),
                (1, [("1", sample_stacks[1])]),
            ],
            {
                "frames": [
                    sample_stacks[0][2][0],
                    sample_stacks[1][2][0],
                ],
                "samples": [
                    {
                        "elapsed_since_start_ns": "0",
                        "thread_id": "1",
                        "stack_id": 0,
                    },
                    {
                        "elapsed_since_start_ns": "1",
                        "thread_id": "1",
                        "stack_id": 1,
                    },
                ],
                "stacks": [[0], [1, 0]],
                "thread_metadata": thread_metadata,
            },
            id="two different stacks",
        ),
    ],
)
@mock.patch("sentry_sdk.profiler.transaction_profiler.MAX_PROFILE_DURATION_NS", 5)
def test_profile_processing(
    DictionaryContaining,  # noqa: N803
    samples,
    expected,
):
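    """Feed (offset, sample) pairs into Profile.write and verify the
    deduplicated frames/stacks/samples that Profile.process produces."""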
    with NoopScheduler(frequency=1000) as scheduler:
        with Profile(True, 0, scheduler=scheduler) as profile:
            for ts, sample in samples:
                # force the sample to be written at a time relative to the
                # start of the profile
                now = profile.start_ns + ts
                profile.write(now, sample)

            processed = profile.process()

            assert processed["thread_metadata"] == DictionaryContaining(
                expected["thread_metadata"]
            )
            assert processed["frames"] == expected["frames"]
            assert processed["stacks"] == expected["stacks"]
            assert processed["samples"] == expected["samples"]


def test_hub_backwards_compatibility(suppress_deprecation_warnings):
    hub = sentry_sdk.Hub()

    with pytest.warns(DeprecationWarning):
        profile = Profile(True, 0, hub=hub)

    with pytest.warns(DeprecationWarning):
        assert profile.hub is hub

    new_hub = sentry_sdk.Hub()

    with pytest.warns(DeprecationWarning):
        profile.hub = new_hub

    with pytest.warns(DeprecationWarning):
        assert profile.hub is new_hub


def test_no_warning_without_hub():
    with warnings.catch_warnings():
        warnings.simplefilter("error")
        Profile(True, 0)
sentry-python-2.18.0/tests/test.key000066400000000000000000000063101471214654000173040ustar00rootroot00000000000000-----BEGIN PRIVATE KEY-----
MIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQCNSgCTO5Pc7o21
BfvfDv/UDwDydEhInosNG7lgumqelT4dyJcYWoiDYAZ8zf6mlPFaw3oYouq+nQo/
Z5eRNQD6AxhXw86qANjcfs1HWoP8d7jgR+ZelrshadvBBGYUJhiDkjUWb8jU7b9M
28z5m4SA5enfSrQYZfVlrX8MFxV70ws5duLye92FYjpqFBWeeGtmsw1iWUO020Nj
bbngpcRmRiBq41KuPydD8IWWQteoOVAI3U2jwEI2foAkXTHB+kQF//NtUWz5yiZY
4ugjY20p0t8Asom1oDK9pL2Qy4EQpsCev/6SJ+o7sK6oR1gyrzodn6hcqJbqcXvp
Y6xgXIO02H8wn7e3NkAJZkfFWJAyIslYrurMcnZwDaLpzL35vyULseOtDfsWQ3yq
TflXHcA2Zlujuv7rmq6Q+GCaLJxbmj5bPUvv8DAARd97BXf57s6C9srT8kk5Ekbf
URWRiO8j5XDLPyqsaP1c/pMPee1CGdtY6gf9EDWgmivgAYvH27pqzKh0JJAsmJ8p
1Zp5xFMtEkzoTlKL2jqeyS6zBO/o+9MHJld5OHcUvlWm767vKKe++aV2IA3h9nBQ
vmbCQ9i0ufGXZYZtJUYk6T8EMLclvtQz4yLRAYx0PLFOKfi1pAfDAHBFEfwWmuCk
cYqw8erbbfoj0qpnuDEj45iUtH5gRwIDAQABAoICADqdqfFrNSPiYC3qxpy6x039
z4HG1joydDPC/bxwek1CU1vd3TmATcRbMTXT7ELF5f+mu1+/Ly5XTmoRmyLl33rZ
j97RYErNQSrw/E8O8VTrgmqhyaQSWp45Ia9JGORhDaiAHsApLiOQYt4LDlW7vFQR
jl5RyreYjR9axCuK5CHT44M6nFrHIpb0spFRtcph4QThYbscl2dP0/xLCGN3wixA
CbDukF2z26FnBrTZFEk5Rcf3r/8wgwfCoXz0oPD91/y5PA9tSY2z3QbhVDdiR2aj
klritxj/1i0xTGfm1avH0n/J3V5bauTKnxs3RhL4+V5S33FZjArFfAfOjzQHDah6
nqz43dAOf83QYreMivxyAnQvU3Cs+J4RKYUsIQzsLpRs/2Wb7nK3W/p+bLdRIl04
Y+xcX+3aKBluKoVMh7CeQDtr8NslSNO+YfGNmGYfD2f05da1Wi+FWqTrXXY2Y/NB
3VJDLgMuNgT5nsimrCl6ZfNcBtyDhsCUPN9V8sGZooEnjG0eNIX/OO3mlEI5GXfY
oFoXsjPX53aYZkOPVZLdXq0IteKGCFZCBhDVOmAqgALlVl66WbO+pMlBB+L7aw/h
H1NlBmrzfOXlYZi8SbmO0DSqC0ckXZCSdbmjix9aOhpDk/NlUZF29xCfQ5Mwk4gk
FboJIKDa0kKXQB18UV4ZAoIBAQC/LX97kOa1YibZIYdkyo0BD8jgjXZGV3y0Lc5V
h5mjOUD2mQ2AE9zcKtfjxEBnFYcC5RFe88vWBuYyLpVdDuZeiAfQHP4bXT+QZRBi
p51PjMuC+5zd5XlGeU5iwnfJ6TBe0yVfSb7M2N88LEeBaVCRcP7rqyiSYnwVkaHN
9Ow1PwJ4BiX0wIn62fO6o6CDo8x9KxXK6G+ak5z83AFSV8+ZGjHMEYcLaVfOj8a2
VFbc2eX1V0ebgJOZVx8eAgjLV6fJahJ1/lT+8y9CzHtS7b3RvU/EsD+7WLMFUxHJ
cPVL6/iHBsV8heKxFfdORSBtBgllQjzv6rzuJ2rZDqQBZF0TAoIBAQC9MhjeEtNw
J8jrnsfg5fDJMPCg5nvb6Ck3z2FyDPJInK+b/IPvcrDl/+X+1vHhmGf5ReLZuEPR
0YEeAWbdMiKJbgRyca5xWRWgP7+sIFmJ9Calvf0FfFzaKQHyLAepBuVp5JMCqqTc
9Rw+5X5MjRgQxvJRppO/EnrvJ3/ZPJEhvYaSqvFQpYR4U0ghoQSlSxoYwCNuKSga
EmpItqZ1j6bKCxy/TZbYgM2SDoSzsD6h/hlLLIU6ecIsBPrF7C+rwxasbLLomoCD
RqjCjsLsgiQU9Qmg01ReRWjXa64r0JKGU0gb+E365WJHqPQgyyhmeYhcXhhUCj+B
Anze8CYU8xp9AoIBAFOpjYh9uPjXoziSO7YYDezRA4+BWKkf0CrpgMpdNRcBDzTb
ddT+3EBdX20FjUmPWi4iIJ/1ANcA3exIBoVa5+WmkgS5K1q+S/rcv3bs8yLE8qq3
gcZ5jcERhQQjJljt+4UD0e8JTr5GiirDFefENsXvNR/dHzwwbSzjNnPzIwuKL4Jm
7mVVfQySJN8gjDYPkIWWPUs2vOBgiOr/PHTUiLzvgatUYEzWJN74fHV+IyUzFjdv
op6iffU08yEmssKJ8ZtrF/ka/Ac2VRBee/mmoNMQjb/9gWZzQqSp3bbSAAbhlTlB
9VqxHKtyeW9/QNl1MtdlTVWQ3G08Qr4KcitJyJECggEAL3lrrgXxUnpZO26bXz6z
vfhu2SEcwWCvPxblr9W50iinFDA39xTDeONOljTfeylgJbe4pcNMGVFF4f6eDjEv
Y2bc7M7D5CNjftOgSBPSBADk1cAnxoGfVwrlNxx/S5W0aW72yLuDJQLIdKvnllPt
TwBs+7od5ts/R9WUijFdhabmJtWIOiFebUcQmYeq/8MpqD5GZbUkH+6xBs/2UxeZ
1acWLpbMnEUt0FGeUOyPutxlAm0IfVTiOWOCfbm3eJU6kkewWRez2b0YScHC/c/m
N/AI23dL+1/VYADgMpRiwBwTwxj6kFOQ5sRphfUUjSo/4lWmKyhrKPcz2ElQdP9P
jQKCAQEAqsAD7r443DklL7oPR/QV0lrjv11EtXcZ0Gff7ZF2FI1V/CxkbYolPrB+
QPSjwcMtyzxy6tXtUnaH19gx/K/8dBO/vnBw1Go/tvloIXidvVE0wemEC+gpTVtP
fLVplwBhcyxOMMGJcqbIT62pzSUisyXeb8dGn27BOUqz69u+z+MKdHDMM/loKJbj
TRw8MB8+t51osJ/tA3SwQCzS4onUMmwqE9eVHspANQeWZVqs+qMtpwW0lvs909Wv
VZ1o9pRPv2G9m7aK4v/bZO56DOx+9/Rp+mv3S2zl2Pkd6RIuD0UR4v03bRz3ACpf
zQTVuucYfxc1ph7H0ppUOZQNZ1Fo7w==
-----END PRIVATE KEY-----
sentry-python-2.18.0/tests/test.pem000066400000000000000000000034321471214654000172770ustar00rootroot00000000000000-----BEGIN CERTIFICATE-----
MIIFETCCAvkCFEtmfMHeEvO+RUV9Qx0bkr7VWpdSMA0GCSqGSIb3DQEBCwUAMEUx
CzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRl
cm5ldCBXaWRnaXRzIFB0eSBMdGQwHhcNMjQwOTE3MjEwNDE1WhcNMjUwOTE3MjEw
NDE1WjBFMQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UE
CgwYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIICIjANBgkqhkiG9w0BAQEFAAOC
Ag8AMIICCgKCAgEAjUoAkzuT3O6NtQX73w7/1A8A8nRISJ6LDRu5YLpqnpU+HciX
GFqIg2AGfM3+ppTxWsN6GKLqvp0KP2eXkTUA+gMYV8POqgDY3H7NR1qD/He44Efm
Xpa7IWnbwQRmFCYYg5I1Fm/I1O2/TNvM+ZuEgOXp30q0GGX1Za1/DBcVe9MLOXbi
8nvdhWI6ahQVnnhrZrMNYllDtNtDY2254KXEZkYgauNSrj8nQ/CFlkLXqDlQCN1N
o8BCNn6AJF0xwfpEBf/zbVFs+comWOLoI2NtKdLfALKJtaAyvaS9kMuBEKbAnr/+
kifqO7CuqEdYMq86HZ+oXKiW6nF76WOsYFyDtNh/MJ+3tzZACWZHxViQMiLJWK7q
zHJ2cA2i6cy9+b8lC7HjrQ37FkN8qk35Vx3ANmZbo7r+65qukPhgmiycW5o+Wz1L
7/AwAEXfewV3+e7OgvbK0/JJORJG31EVkYjvI+Vwyz8qrGj9XP6TD3ntQhnbWOoH
/RA1oJor4AGLx9u6asyodCSQLJifKdWaecRTLRJM6E5Si9o6nskuswTv6PvTByZX
eTh3FL5Vpu+u7yinvvmldiAN4fZwUL5mwkPYtLnxl2WGbSVGJOk/BDC3Jb7UM+Mi
0QGMdDyxTin4taQHwwBwRRH8FprgpHGKsPHq2236I9KqZ7gxI+OYlLR+YEcCAwEA
ATANBgkqhkiG9w0BAQsFAAOCAgEAgFVmFmk7duJRYqktcc4/qpbGUQTaalcjBvMQ
SnTS0l3WNTwOeUBbCR6V72LOBhRG1hqsQJIlXFIuoFY7WbQoeHciN58abwXan3N+
4Kzuue5oFdj2AK9UTSKE09cKHoBD5uwiuU1oMGRxvq0+nUaJMoC333TNBXlIFV6K
SZFfD+MpzoNdn02PtjSBzsu09szzC+r8ZyKUwtG6xTLRBA8vrukWgBYgn9CkniJk
gLw8z5FioOt8ISEkAqvtyfJPi0FkUBb/vFXwXaaM8Vvn++ssYiUes0K5IzF+fQ5l
Bv8PIkVXFrNKuvzUgpO9IaUuQavSHFC0w0FEmbWsku7UxgPvLFPqmirwcnrkQjVR
eyE25X2Sk6AucnfIFGUvYPcLGJ71Z8mjH0baB2a/zo8vnWR1rqiUfptNomm42WMm
PaprIC0684E0feT+cqbN+LhBT9GqXpaG3emuguxSGMkff4RtPv/3DOFNk9KAIK8i
7GWCBjW5GF7mkTdQtYqVi1d87jeuGZ1InF1FlIZaswWGeG6Emml+Gxa50Z7Kpmc7
f2vZlg9E8kmbRttCVUx4kx5PxKOI6s/ebKTFbHO+ZXJtm8MyOTrAJLfnFo4SUA90
zX6CzyP1qu1/qdf9+kT0o0JeEsqg+0f4yhp3x/xH5OsAlUpRHvRr2aB3ZYi/4Vwj
53fMNXk=
-----END CERTIFICATE-----
sentry-python-2.18.0/tests/test_ai_monitoring.py000066400000000000000000000071411471214654000220650ustar00rootroot00000000000000import pytest

import sentry_sdk
from sentry_sdk.ai.monitoring import ai_track


def test_ai_track(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0)
    events = capture_events()

    @ai_track("my tool")
    def tool(**kwargs):
        pass

    @ai_track("some test pipeline")
    def pipeline():
        tool()

    with sentry_sdk.start_transaction():
        pipeline()

    transaction = events[0]
    assert transaction["type"] == "transaction"
    assert len(transaction["spans"]) == 2
    spans = transaction["spans"]

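    # span ordering is not guaranteed, so select each span by its op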
    ai_pipeline_span = spans[0] if spans[0]["op"] == "ai.pipeline" else spans[1]
    ai_run_span = spans[0] if spans[0]["op"] == "ai.run" else spans[1]

    assert ai_pipeline_span["description"] == "some test pipeline"
    assert ai_run_span["description"] == "my tool"


def test_ai_track_with_tags(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0)
    events = capture_events()

    @ai_track("my tool")
    def tool(**kwargs):
        pass

    @ai_track("some test pipeline")
    def pipeline():
        tool()

    with sentry_sdk.start_transaction():
        pipeline(sentry_tags={"user": "colin"}, sentry_data={"some_data": "value"})

    transaction = events[0]
    assert transaction["type"] == "transaction"
    assert len(transaction["spans"]) == 2
    spans = transaction["spans"]

    ai_pipeline_span = spans[0] if spans[0]["op"] == "ai.pipeline" else spans[1]
    ai_run_span = spans[0] if spans[0]["op"] == "ai.run" else spans[1]

    assert ai_pipeline_span["description"] == "some test pipeline"
    assert ai_pipeline_span["tags"]["user"] == "colin"
    assert ai_pipeline_span["data"]["some_data"] == "value"
    assert ai_run_span["description"] == "my tool"


@pytest.mark.asyncio
async def test_ai_track_async(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0)
    events = capture_events()

    @ai_track("my async tool")
    async def async_tool(**kwargs):
        pass

    @ai_track("some async test pipeline")
    async def async_pipeline():
        await async_tool()

    with sentry_sdk.start_transaction():
        await async_pipeline()

    transaction = events[0]
    assert transaction["type"] == "transaction"
    assert len(transaction["spans"]) == 2
    spans = transaction["spans"]

    ai_pipeline_span = spans[0] if spans[0]["op"] == "ai.pipeline" else spans[1]
    ai_run_span = spans[0] if spans[0]["op"] == "ai.run" else spans[1]

    assert ai_pipeline_span["description"] == "some async test pipeline"
    assert ai_run_span["description"] == "my async tool"


@pytest.mark.asyncio
async def test_ai_track_async_with_tags(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0)
    events = capture_events()

    @ai_track("my async tool")
    async def async_tool(**kwargs):
        pass

    @ai_track("some async test pipeline")
    async def async_pipeline():
        await async_tool()

    with sentry_sdk.start_transaction():
        await async_pipeline(
            sentry_tags={"user": "czyber"}, sentry_data={"some_data": "value"}
        )

    transaction = events[0]
    assert transaction["type"] == "transaction"
    assert len(transaction["spans"]) == 2
    spans = transaction["spans"]

    ai_pipeline_span = spans[0] if spans[0]["op"] == "ai.pipeline" else spans[1]
    ai_run_span = spans[0] if spans[0]["op"] == "ai.run" else spans[1]

    assert ai_pipeline_span["description"] == "some async test pipeline"
    assert ai_pipeline_span["tags"]["user"] == "czyber"
    assert ai_pipeline_span["data"]["some_data"] == "value"
    assert ai_run_span["description"] == "my async tool"
sentry-python-2.18.0/tests/test_api.py000066400000000000000000000122311471214654000177740ustar00rootroot00000000000000import pytest
from unittest import mock

from sentry_sdk import (
    capture_exception,
    continue_trace,
    get_baggage,
    get_client,
    get_current_span,
    get_traceparent,
    is_initialized,
    start_transaction,
    set_tags,
    configure_scope,
    push_scope,
    get_global_scope,
    get_current_scope,
    get_isolation_scope,
)

from sentry_sdk.client import Client, NonRecordingClient


@pytest.mark.forked
def test_get_current_span():
    fake_scope = mock.MagicMock()
    fake_scope.span = mock.MagicMock()
    assert get_current_span(fake_scope) == fake_scope.span

    fake_scope.span = None
    assert get_current_span(fake_scope) is None


@pytest.mark.forked
def test_get_current_span_default_hub(sentry_init):
    sentry_init()

    assert get_current_span() is None

    scope = get_current_scope()
    fake_span = mock.MagicMock()
    scope.span = fake_span

    assert get_current_span() == fake_span


@pytest.mark.forked
def test_get_current_span_default_hub_with_transaction(sentry_init):
    sentry_init()

    assert get_current_span() is None

    with start_transaction() as new_transaction:
        assert get_current_span() == new_transaction


@pytest.mark.forked
def test_traceparent_with_tracing_enabled(sentry_init):
    sentry_init(traces_sample_rate=1.0)

    with start_transaction() as transaction:
        expected_traceparent = "%s-%s-1" % (
            transaction.trace_id,
            transaction.span_id,
        )
        assert get_traceparent() == expected_traceparent


@pytest.mark.forked
def test_traceparent_with_tracing_disabled(sentry_init):
    sentry_init()

    propagation_context = get_isolation_scope()._propagation_context
    expected_traceparent = "%s-%s" % (
        propagation_context.trace_id,
        propagation_context.span_id,
    )
    assert get_traceparent() == expected_traceparent


@pytest.mark.forked
def test_baggage_with_tracing_disabled(sentry_init):
    sentry_init(release="1.0.0", environment="dev")
    propagation_context = get_isolation_scope()._propagation_context
    expected_baggage = (
        "sentry-trace_id={},sentry-environment=dev,sentry-release=1.0.0".format(
            propagation_context.trace_id
        )
    )
    assert get_baggage() == expected_baggage


@pytest.mark.forked
def test_baggage_with_tracing_enabled(sentry_init):
    sentry_init(traces_sample_rate=1.0, release="1.0.0", environment="dev")
    with start_transaction() as transaction:
        expected_baggage = "sentry-trace_id={},sentry-environment=dev,sentry-release=1.0.0,sentry-sample_rate=1.0,sentry-sampled={}".format(
            transaction.trace_id, "true" if transaction.sampled else "false"
        )
        assert get_baggage() == expected_baggage


@pytest.mark.forked
def test_continue_trace(sentry_init):
    sentry_init()

    trace_id = "471a43a4192642f0b136d5159a501701"
    parent_span_id = "6e8f22c393e68f19"
    parent_sampled = 1
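    # sentry-trace header format: "<trace_id>-<parent_span_id>-<parent_sampled>"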
    transaction = continue_trace(
        {
            "sentry-trace": "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled),
            "baggage": "sentry-trace_id=566e3688a61d4bc888951642d6f14a19",
        },
        name="some name",
    )
    with start_transaction(transaction):
        assert transaction.name == "some name"

        propagation_context = get_isolation_scope()._propagation_context
        assert propagation_context.trace_id == transaction.trace_id == trace_id
        assert propagation_context.parent_span_id == parent_span_id
        assert propagation_context.parent_sampled == parent_sampled
        assert propagation_context.dynamic_sampling_context == {
            "trace_id": "566e3688a61d4bc888951642d6f14a19"
        }


@pytest.mark.forked
def test_is_initialized():
    assert not is_initialized()

    scope = get_global_scope()
    scope.set_client(Client())
    assert is_initialized()


@pytest.mark.forked
def test_get_client():
    client = get_client()
    assert client is not None
    assert client.__class__ == NonRecordingClient
    assert not client.is_active()


def raise_and_capture():
    """Raise an exception and capture it.

    This is a utility function for test_set_tags.
    """
    try:
        1 / 0
    except ZeroDivisionError:
        capture_exception()


def test_set_tags(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    set_tags({"tag1": "value1", "tag2": "value2"})
    raise_and_capture()

    (*_, event) = events
    assert event["tags"] == {"tag1": "value1", "tag2": "value2"}, "Setting tags failed"

    set_tags({"tag2": "updated", "tag3": "new"})
    raise_and_capture()

    (*_, event) = events
    assert event["tags"] == {
        "tag1": "value1",
        "tag2": "updated",
        "tag3": "new",
    }, "Updating tags failed"

    set_tags({})
    raise_and_capture()

    (*_, event) = events
    assert event["tags"] == {
        "tag1": "value1",
        "tag2": "updated",
        "tag3": "new",
    }, "Updating tags with empty dict changed tags"


def test_configure_scope_deprecation():
    with pytest.warns(DeprecationWarning):
        with configure_scope():
            ...


def test_push_scope_deprecation():
    with pytest.warns(DeprecationWarning):
        with push_scope():
            ...
sentry-python-2.18.0/tests/test_basics.py000066400000000000000000000747761471214654000205150ustar00rootroot00000000000000import datetime
import importlib
import logging
import os
import sys
import time
from collections import Counter

import pytest
from sentry_sdk.client import Client
from sentry_sdk.utils import datetime_from_isoformat
from tests.conftest import patch_start_tracing_child

import sentry_sdk
import sentry_sdk.scope
from sentry_sdk import (
    get_client,
    push_scope,
    capture_event,
    capture_exception,
    capture_message,
    start_transaction,
    last_event_id,
    add_breadcrumb,
    isolation_scope,
    new_scope,
    Hub,
)
from sentry_sdk.integrations import (
    _AUTO_ENABLING_INTEGRATIONS,
    _DEFAULT_INTEGRATIONS,
    DidNotEnable,
    Integration,
    setup_integrations,
)
from sentry_sdk.integrations.logging import LoggingIntegration
from sentry_sdk.integrations.stdlib import StdlibIntegration
from sentry_sdk.scope import add_global_event_processor
from sentry_sdk.utils import get_sdk_name, reraise
from sentry_sdk.tracing_utils import has_tracing_enabled


class NoOpIntegration(Integration):
    """
    A simple no-op integration for testing purposes.
    """

    identifier = "noop"

    @staticmethod
    def setup_once():  # type: () -> None
        pass

    def __eq__(self, __value):  # type: (object) -> bool
        """
        All instances of NoOpIntegration should be considered equal to each other.
        """
        return type(__value) == type(self)


def test_processors(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    def error_processor(event, exc_info):
        event["exception"]["values"][0]["value"] += " whatever"
        return event

    sentry_sdk.get_isolation_scope().add_error_processor(error_processor, ValueError)

    try:
        raise ValueError("aha!")
    except Exception:
        capture_exception()

    (event,) = events

    assert event["exception"]["values"][0]["value"] == "aha! whatever"


class ModuleImportErrorSimulator:
    def __init__(self, modules, error_cls=DidNotEnable):
        self.modules = modules
        self.error_cls = error_cls
        for sys_module in list(sys.modules.keys()):
            if any(sys_module.startswith(module) for module in modules):
                del sys.modules[sys_module]

    def find_spec(self, fullname, _path, _target=None):
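        # PEP 451 meta path finder hook: raising here makes importing any of
        # the targeted modules fail, simulating a missing dependency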
        if fullname in self.modules:
            raise self.error_cls("Test import failure for %s" % fullname)

    def __enter__(self):
        # WARNING: We need to be first to avoid pytest messing with local imports
        sys.meta_path.insert(0, self)

    def __exit__(self, *_args):
        sys.meta_path.remove(self)


def test_auto_enabling_integrations_catches_import_error(sentry_init, caplog):
    caplog.set_level(logging.DEBUG)

    with ModuleImportErrorSimulator(
        [i.rsplit(".", 1)[0] for i in _AUTO_ENABLING_INTEGRATIONS]
    ):
        sentry_init(auto_enabling_integrations=True, debug=True)

    for import_string in _AUTO_ENABLING_INTEGRATIONS:
        assert any(
            record.message.startswith(
                "Did not import default integration {}:".format(import_string)
            )
            for record in caplog.records
        ), "Problem with checking auto enabling {}".format(import_string)


def test_generic_mechanism(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    try:
        raise ValueError("aha!")
    except Exception:
        capture_exception()

    (event,) = events
    assert event["exception"]["values"][0]["mechanism"]["type"] == "generic"
    assert event["exception"]["values"][0]["mechanism"]["handled"]


def test_option_before_send(sentry_init, capture_events):
    def before_send(event, hint):
        event["extra"] = {"before_send_called": True}
        return event

    def do_this():
        try:
            raise ValueError("aha!")
        except Exception:
            capture_exception()

    sentry_init(before_send=before_send)
    events = capture_events()

    do_this()

    (event,) = events
    assert event["extra"] == {"before_send_called": True}


def test_option_before_send_discard(sentry_init, capture_events):
    def before_send_discard(event, hint):
        return None

    def do_this():
        try:
            raise ValueError("aha!")
        except Exception:
            capture_exception()

    sentry_init(before_send=before_send_discard)
    events = capture_events()

    do_this()

    assert len(events) == 0


def test_option_before_send_transaction(sentry_init, capture_events):
    def before_send_transaction(event, hint):
        assert event["type"] == "transaction"
        event["extra"] = {"before_send_transaction_called": True}
        return event

    sentry_init(
        before_send_transaction=before_send_transaction,
        traces_sample_rate=1.0,
    )
    events = capture_events()
    transaction = start_transaction(name="foo")
    transaction.finish()

    (event,) = events
    assert event["transaction"] == "foo"
    assert event["extra"] == {"before_send_transaction_called": True}


def test_option_before_send_transaction_discard(sentry_init, capture_events):
    def before_send_transaction_discard(event, hint):
        return None

    sentry_init(
        before_send_transaction=before_send_transaction_discard,
        traces_sample_rate=1.0,
    )
    events = capture_events()
    transaction = start_transaction(name="foo")
    transaction.finish()

    assert len(events) == 0


def test_option_before_breadcrumb(sentry_init, capture_events, monkeypatch):
    drop_events = False
    drop_breadcrumbs = False
    reports = []

    def record_lost_event(reason, data_category=None, item=None):
        reports.append((reason, data_category))

    def before_send(event, hint):
        assert isinstance(hint["exc_info"][1], ValueError)
        if not drop_events:
            event["extra"] = {"foo": "bar"}
            return event

    def before_breadcrumb(crumb, hint):
        assert hint == {"foo": 42}
        if not drop_breadcrumbs:
            crumb["data"] = {"foo": "bar"}
            return crumb

    sentry_init(before_send=before_send, before_breadcrumb=before_breadcrumb)
    events = capture_events()

    monkeypatch.setattr(
        sentry_sdk.get_client().transport, "record_lost_event", record_lost_event
    )

    def do_this():
        add_breadcrumb(message="Hello", hint={"foo": 42})
        try:
            raise ValueError("aha!")
        except Exception:
            capture_exception()

    do_this()
    drop_breadcrumbs = True
    do_this()
    assert not reports
    drop_events = True
    do_this()
    assert reports == [("before_send", "error")]

    normal, no_crumbs = events

    assert normal["exception"]["values"][0]["type"] == "ValueError"
    (crumb,) = normal["breadcrumbs"]["values"]
    assert "timestamp" in crumb
    assert crumb["message"] == "Hello"
    assert crumb["data"] == {"foo": "bar"}
    assert crumb["type"] == "default"


@pytest.mark.parametrize(
    "enable_tracing, traces_sample_rate, tracing_enabled, updated_traces_sample_rate",
    [
        (None, None, False, None),
        (False, 0.0, False, 0.0),
        (False, 1.0, False, 1.0),
        (None, 1.0, True, 1.0),
        (True, 1.0, True, 1.0),
        (None, 0.0, True, 0.0),  # tracing is configured but explicitly turned off
        (True, 0.0, True, 0.0),  # tracing is configured but explicitly turned off
        (True, None, True, 1.0),
    ],
)
def test_option_enable_tracing(
    sentry_init,
    enable_tracing,
    traces_sample_rate,
    tracing_enabled,
    updated_traces_sample_rate,
):
    sentry_init(enable_tracing=enable_tracing, traces_sample_rate=traces_sample_rate)
    options = sentry_sdk.get_client().options
    assert has_tracing_enabled(options) is tracing_enabled
    assert options["traces_sample_rate"] == updated_traces_sample_rate


def test_breadcrumb_arguments(sentry_init, capture_events):
    assert_hint = {"bar": 42}

    def before_breadcrumb(crumb, hint):
        assert crumb["foo"] == 42
        assert hint == assert_hint

    sentry_init(before_breadcrumb=before_breadcrumb)

    add_breadcrumb(foo=42, hint=dict(bar=42))
    add_breadcrumb(dict(foo=42), dict(bar=42))
    add_breadcrumb(dict(foo=42), hint=dict(bar=42))
    add_breadcrumb(crumb=dict(foo=42), hint=dict(bar=42))

    assert_hint.clear()
    add_breadcrumb(foo=42)
    add_breadcrumb(crumb=dict(foo=42))


def test_push_scope(sentry_init, capture_events, suppress_deprecation_warnings):
    sentry_init()
    events = capture_events()

    with push_scope() as scope:
        scope.level = "warning"
        try:
            1 / 0
        except Exception as e:
            capture_exception(e)

    (event,) = events

    assert event["level"] == "warning"
    assert "exception" in event


def test_push_scope_null_client(
    sentry_init, capture_events, suppress_deprecation_warnings
):
    """
    This test can be removed when we remove push_scope and the Hub from the SDK.
    """
    sentry_init()
    events = capture_events()

    Hub.current.bind_client(None)

    with push_scope() as scope:
        scope.level = "warning"
        try:
            1 / 0
        except Exception as e:
            capture_exception(e)

    assert len(events) == 0


@pytest.mark.skip(
    reason="This test is not valid anymore, because push_scope just returns the isolation scope. This test should be removed once the Hub is removed"
)
@pytest.mark.parametrize("null_client", (True, False))
def test_push_scope_callback(sentry_init, null_client, capture_events):
    """
    This test can be removed when we remove push_scope and the Hub from the SDK.
    """
    sentry_init()

    if null_client:
        Hub.current.bind_client(None)

    outer_scope = Hub.current.scope

    calls = []

    @push_scope
    def _(scope):
        assert scope is Hub.current.scope
        assert scope is not outer_scope
        calls.append(1)

    # push_scope always needs to execute the callback regardless of
    # client state, because that actually runs user code in it, not
    # just scope config code
    assert calls == [1]

    # Assert scope gets popped correctly
    assert Hub.current.scope is outer_scope


def test_breadcrumbs(sentry_init, capture_events):
    sentry_init(max_breadcrumbs=10)
    events = capture_events()

    for i in range(20):
        add_breadcrumb(
            category="auth", message="Authenticated user %s" % i, level="info"
        )

    capture_exception(ValueError())
    (event,) = events

    assert len(event["breadcrumbs"]["values"]) == 10
    assert "user 10" in event["breadcrumbs"]["values"][0]["message"]
    assert "user 19" in event["breadcrumbs"]["values"][-1]["message"]

    del events[:]

    for i in range(2):
        add_breadcrumb(
            category="auth", message="Authenticated user %s" % i, level="info"
        )

    sentry_sdk.get_isolation_scope().clear()

    capture_exception(ValueError())
    (event,) = events
    assert len(event["breadcrumbs"]["values"]) == 0


def test_breadcrumb_ordering(sentry_init, capture_events):
    sentry_init()
    events = capture_events()
    now = datetime.datetime.now(datetime.timezone.utc).replace(microsecond=0)

    timestamps = [
        now - datetime.timedelta(days=10),
        now - datetime.timedelta(days=8),
        now - datetime.timedelta(days=12),
    ]

    for timestamp in timestamps:
        add_breadcrumb(
            message="Authenticated at %s" % timestamp,
            category="auth",
            level="info",
            timestamp=timestamp,
        )

    capture_exception(ValueError())
    (event,) = events

    assert len(event["breadcrumbs"]["values"]) == len(timestamps)
    timestamps_from_event = [
        datetime_from_isoformat(x["timestamp"]) for x in event["breadcrumbs"]["values"]
    ]
    assert timestamps_from_event == sorted(timestamps)


def test_breadcrumb_ordering_different_types(sentry_init, capture_events):
    sentry_init()
    events = capture_events()
    now = datetime.datetime.now(datetime.timezone.utc)

    timestamps = [
        now - datetime.timedelta(days=10),
        now - datetime.timedelta(days=8),
        now.replace(microsecond=0) - datetime.timedelta(days=12),
        now - datetime.timedelta(days=9),
        now - datetime.timedelta(days=13),
        now.replace(microsecond=0) - datetime.timedelta(days=11),
    ]

    breadcrumb_timestamps = [
        timestamps[0],
        timestamps[1].isoformat(),
        datetime.datetime.strftime(timestamps[2], "%Y-%m-%dT%H:%M:%S") + "Z",
        datetime.datetime.strftime(timestamps[3], "%Y-%m-%dT%H:%M:%S.%f") + "+00:00",
        datetime.datetime.strftime(timestamps[4], "%Y-%m-%dT%H:%M:%S.%f") + "+0000",
        datetime.datetime.strftime(timestamps[5], "%Y-%m-%dT%H:%M:%S.%f") + "-0000",
    ]

    for i, timestamp in enumerate(timestamps):
        add_breadcrumb(
            message="Authenticated at %s" % timestamp,
            category="auth",
            level="info",
            timestamp=breadcrumb_timestamps[i],
        )

    capture_exception(ValueError())
    (event,) = events

    assert len(event["breadcrumbs"]["values"]) == len(timestamps)
    timestamps_from_event = [
        datetime_from_isoformat(x["timestamp"]) for x in event["breadcrumbs"]["values"]
    ]
    assert timestamps_from_event == sorted(timestamps)


def test_attachments(sentry_init, capture_envelopes):
    sentry_init()
    envelopes = capture_envelopes()

    this_file = os.path.abspath(__file__.rstrip("c"))

    scope = sentry_sdk.get_isolation_scope()
    scope.add_attachment(bytes=b"Hello World!", filename="message.txt")
    scope.add_attachment(path=this_file)

    capture_exception(ValueError())

    (envelope,) = envelopes

    assert len(envelope.items) == 3
    assert envelope.get_event()["exception"] is not None

    attachments = [x for x in envelope.items if x.type == "attachment"]
    (message, pyfile) = attachments

    assert message.headers["filename"] == "message.txt"
    assert message.headers["type"] == "attachment"
    assert message.headers["content_type"] == "text/plain"
    assert message.payload.bytes == message.payload.get_bytes() == b"Hello World!"

    assert pyfile.headers["filename"] == os.path.basename(this_file)
    assert pyfile.headers["type"] == "attachment"
    assert pyfile.headers["content_type"].startswith("text/")
    assert pyfile.payload.bytes is None
    with open(this_file, "rb") as f:
        assert pyfile.payload.get_bytes() == f.read()


@pytest.mark.tests_internal_exceptions
def test_attachments_graceful_failure(
    sentry_init, capture_envelopes, internal_exceptions
):
    sentry_init()
    envelopes = capture_envelopes()

    sentry_sdk.get_isolation_scope().add_attachment(path="non_existent")
    capture_exception(ValueError())

    (envelope,) = envelopes
    assert len(envelope.items) == 2
    assert envelope.items[1].payload.get_bytes() == b""


def test_integration_scoping(sentry_init, capture_events):
    logger = logging.getLogger("test_basics")

    # This client uses the logging integration
    logging_integration = LoggingIntegration(event_level=logging.WARNING)
    sentry_init(default_integrations=False, integrations=[logging_integration])
    events = capture_events()
    logger.warning("This is a warning")
    assert len(events) == 1

    # This client does not
    sentry_init(default_integrations=False)
    events = capture_events()
    logger.warning("This is not a warning")
    assert not events


default_integrations = [
    getattr(
        importlib.import_module(integration.rsplit(".", 1)[0]),
        integration.rsplit(".", 1)[1],
    )
    for integration in _DEFAULT_INTEGRATIONS
]


@pytest.mark.forked
@pytest.mark.parametrize(
    "provided_integrations,default_integrations,disabled_integrations,expected_integrations",
    [
        ([], False, None, set()),
        ([], False, [], set()),
        ([LoggingIntegration()], False, None, {LoggingIntegration}),
        ([], True, None, set(default_integrations)),
        (
            [],
            True,
            [LoggingIntegration(), StdlibIntegration],
            set(default_integrations) - {LoggingIntegration, StdlibIntegration},
        ),
    ],
)
def test_integrations(
    sentry_init,
    provided_integrations,
    default_integrations,
    disabled_integrations,
    expected_integrations,
    reset_integrations,
):
    sentry_init(
        integrations=provided_integrations,
        default_integrations=default_integrations,
        disabled_integrations=disabled_integrations,
        auto_enabling_integrations=False,
        debug=True,
    )
    assert {
        type(integration) for integration in get_client().integrations.values()
    } == expected_integrations


@pytest.mark.skip(
    reason="This test is not valid anymore, because with the new Scopes calling bind_client on the Hub sets the client on the global scope. This test should be removed once the Hub is removed"
)
def test_client_initialized_within_scope(sentry_init, caplog):
    """
    This test can be removed when we remove push_scope and the Hub from the SDK.
    """
    caplog.set_level(logging.WARNING)

    sentry_init()

    with push_scope():
        Hub.current.bind_client(Client())

    (record,) = (x for x in caplog.records if x.levelname == "WARNING")

    assert record.msg.startswith("init() called inside of pushed scope.")


@pytest.mark.skip(
    reason="This test is not valid anymore, because with the new Scopes the push_scope just returns the isolation scope. This test should be removed once the Hub is removed"
)
def test_scope_leaks_cleaned_up(sentry_init, caplog):
    """
    This test can be removed when we remove push_scope and the Hub from the SDK.
    """
    caplog.set_level(logging.WARNING)

    sentry_init()

    old_stack = list(Hub.current._stack)

    with push_scope():
        push_scope()

    assert Hub.current._stack == old_stack

    (record,) = (x for x in caplog.records if x.levelname == "WARNING")

    assert record.message.startswith("Leaked 1 scopes:")


@pytest.mark.skip(
    reason="This test is not valid anymore, because with the new Scopes there is not pushing and popping of scopes. This test should be removed once the Hub is removed"
)
def test_scope_popped_too_soon(sentry_init, caplog):
    """
    This test can be removed when we remove push_scope and the Hub from the SDK.
    """
    caplog.set_level(logging.ERROR)

    sentry_init()

    old_stack = list(Hub.current._stack)

    with push_scope():
        Hub.current.pop_scope_unsafe()

    assert Hub.current._stack == old_stack

    (record,) = (x for x in caplog.records if x.levelname == "ERROR")

    assert record.message == ("Scope popped too soon. Popped 1 scopes too many.")


def test_scope_event_processor_order(sentry_init, capture_events):
    def before_send(event, hint):
        event["message"] += "baz"
        return event

    sentry_init(debug=True, before_send=before_send)
    events = capture_events()

    with new_scope() as scope:

        @scope.add_event_processor
        def foo(event, hint):
            event["message"] += "foo"
            return event

        with new_scope() as scope:

            @scope.add_event_processor
            def bar(event, hint):
                event["message"] += "bar"
                return event

            capture_message("hi")

    (event,) = events

    assert event["message"] == "hifoobarbaz"


def test_capture_event_with_scope_kwargs(sentry_init, capture_events):
    sentry_init()
    events = capture_events()
    capture_event({}, level="info", extras={"foo": "bar"})
    (event,) = events
    assert event["level"] == "info"
    assert event["extra"]["foo"] == "bar"


def test_dedupe_event_processor_drop_records_client_report(
    sentry_init, capture_events, capture_record_lost_event_calls
):
    """
    DedupeIntegration internally has an event_processor that filters duplicate exceptions.
    We want a duplicate exception to be captured only once and the drop being recorded as
    a client report.
    """
    sentry_init()
    events = capture_events()
    record_lost_event_calls = capture_record_lost_event_calls()

    try:
        raise ValueError("aha!")
    except Exception:
        try:
            capture_exception()
            reraise(*sys.exc_info())
        except Exception:
            capture_exception()

    (event,) = events
    (lost_event_call,) = record_lost_event_calls

    assert event["level"] == "error"
    assert "exception" in event
    assert lost_event_call == ("event_processor", "error", None, 1)


def test_event_processor_drop_records_client_report(
    sentry_init, capture_events, capture_record_lost_event_calls
):
    sentry_init(traces_sample_rate=1.0)
    events = capture_events()
    record_lost_event_calls = capture_record_lost_event_calls()

    # Ensure full idempotency by restoring the original global event processors list object, not just a copy.
    old_processors = sentry_sdk.scope.global_event_processors

    try:
        sentry_sdk.scope.global_event_processors = (
            sentry_sdk.scope.global_event_processors.copy()
        )

        @add_global_event_processor
        def foo(event, hint):
            return None

        capture_message("dropped")

        with start_transaction(name="dropped"):
            pass

        assert len(events) == 0

        # Using Counter because order of record_lost_event calls does not matter
        assert Counter(record_lost_event_calls) == Counter(
            [
                ("event_processor", "error", None, 1),
                ("event_processor", "transaction", None, 1),
                ("event_processor", "span", None, 1),
            ]
        )

    finally:
        sentry_sdk.scope.global_event_processors = old_processors


@pytest.mark.parametrize(
    "installed_integrations, expected_name",
    [
        # integrations with own name
        (["django"], "sentry.python.django"),
        (["flask"], "sentry.python.flask"),
        (["fastapi"], "sentry.python.fastapi"),
        (["bottle"], "sentry.python.bottle"),
        (["falcon"], "sentry.python.falcon"),
        (["quart"], "sentry.python.quart"),
        (["sanic"], "sentry.python.sanic"),
        (["starlette"], "sentry.python.starlette"),
        (["starlite"], "sentry.python.starlite"),
        (["litestar"], "sentry.python.litestar"),
        (["chalice"], "sentry.python.chalice"),
        (["serverless"], "sentry.python.serverless"),
        (["pyramid"], "sentry.python.pyramid"),
        (["tornado"], "sentry.python.tornado"),
        (["aiohttp"], "sentry.python.aiohttp"),
        (["aws_lambda"], "sentry.python.aws_lambda"),
        (["gcp"], "sentry.python.gcp"),
        (["beam"], "sentry.python.beam"),
        (["asgi"], "sentry.python.asgi"),
        (["wsgi"], "sentry.python.wsgi"),
        # integrations without name
        (["argv"], "sentry.python"),
        (["atexit"], "sentry.python"),
        (["boto3"], "sentry.python"),
        (["celery"], "sentry.python"),
        (["dedupe"], "sentry.python"),
        (["excepthook"], "sentry.python"),
        (["executing"], "sentry.python"),
        (["modules"], "sentry.python"),
        (["pure_eval"], "sentry.python"),
        (["redis"], "sentry.python"),
        (["rq"], "sentry.python"),
        (["sqlalchemy"], "sentry.python"),
        (["stdlib"], "sentry.python"),
        (["threading"], "sentry.python"),
        (["trytond"], "sentry.python"),
        (["logging"], "sentry.python"),
        (["gnu_backtrace"], "sentry.python"),
        (["httpx"], "sentry.python"),
        # precedence of frameworks
        (["flask", "django", "celery"], "sentry.python.django"),
        (["fastapi", "flask", "redis"], "sentry.python.flask"),
        (["bottle", "fastapi", "httpx"], "sentry.python.fastapi"),
        (["falcon", "bottle", "logging"], "sentry.python.bottle"),
        (["quart", "falcon", "gnu_backtrace"], "sentry.python.falcon"),
        (["sanic", "quart", "sqlalchemy"], "sentry.python.quart"),
        (["starlette", "sanic", "rq"], "sentry.python.sanic"),
        (["chalice", "starlette", "modules"], "sentry.python.starlette"),
        (["chalice", "starlite", "modules"], "sentry.python.starlite"),
        (["chalice", "litestar", "modules"], "sentry.python.litestar"),
        (["serverless", "chalice", "pure_eval"], "sentry.python.chalice"),
        (["pyramid", "serverless", "modules"], "sentry.python.serverless"),
        (["tornado", "pyramid", "executing"], "sentry.python.pyramid"),
        (["aiohttp", "tornado", "dedupe"], "sentry.python.tornado"),
        (["aws_lambda", "aiohttp", "boto3"], "sentry.python.aiohttp"),
        (["gcp", "aws_lambda", "atexit"], "sentry.python.aws_lambda"),
        (["beam", "gcp", "argv"], "sentry.python.gcp"),
        (["asgi", "beam", "stdtlib"], "sentry.python.beam"),
        (["wsgi", "asgi", "boto3"], "sentry.python.asgi"),
        (["wsgi", "celery", "redis"], "sentry.python.wsgi"),
    ],
)
def test_get_sdk_name(installed_integrations, expected_name):
    assert get_sdk_name(installed_integrations) == expected_name


def _hello_world(word):
    return "Hello, {}".format(word)


def test_functions_to_trace(sentry_init, capture_events):
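    # functions_to_trace patches the listed callables at init time so that
    # each call is recorded as a span inside the enclosing transaction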
    functions_to_trace = [
        {"qualified_name": "tests.test_basics._hello_world"},
        {"qualified_name": "time.sleep"},
    ]

    sentry_init(
        traces_sample_rate=1.0,
        functions_to_trace=functions_to_trace,
    )

    events = capture_events()

    with start_transaction(name="something"):
        time.sleep(0)

        for word in ["World", "You"]:
            _hello_world(word)

    assert len(events) == 1

    (event,) = events

    assert len(event["spans"]) == 3
    assert event["spans"][0]["description"] == "time.sleep"
    assert event["spans"][1]["description"] == "tests.test_basics._hello_world"
    assert event["spans"][2]["description"] == "tests.test_basics._hello_world"


class WorldGreeter:
    def __init__(self, word):
        self.word = word

    def greet(self, new_word=None):
        return "Hello, {}".format(new_word if new_word else self.word)


def test_functions_to_trace_with_class(sentry_init, capture_events):
    functions_to_trace = [
        {"qualified_name": "tests.test_basics.WorldGreeter.greet"},
    ]

    sentry_init(
        traces_sample_rate=1.0,
        functions_to_trace=functions_to_trace,
    )

    events = capture_events()

    with start_transaction(name="something"):
        wg = WorldGreeter("World")
        wg.greet()
        wg.greet("You")

    assert len(events) == 1

    (event,) = events

    assert len(event["spans"]) == 2
    assert event["spans"][0]["description"] == "tests.test_basics.WorldGreeter.greet"
    assert event["spans"][1]["description"] == "tests.test_basics.WorldGreeter.greet"


def test_multiple_setup_integrations_calls():
    first_call_return = setup_integrations([NoOpIntegration()], with_defaults=False)
    assert first_call_return == {NoOpIntegration.identifier: NoOpIntegration()}

    second_call_return = setup_integrations([NoOpIntegration()], with_defaults=False)
    assert second_call_return == {NoOpIntegration.identifier: NoOpIntegration()}


class TracingTestClass:
    @staticmethod
    def static(arg):
        return arg

    @classmethod
    def class_(cls, arg):
        return cls, arg


def test_staticmethod_tracing(sentry_init):
    test_staticmethod_name = "tests.test_basics.TracingTestClass.static"

    assert (
        ".".join(
            [
                TracingTestClass.static.__module__,
                TracingTestClass.static.__qualname__,
            ]
        )
        == test_staticmethod_name
    ), "The test static method was moved or renamed. Please update the name accordingly"

    sentry_init(functions_to_trace=[{"qualified_name": test_staticmethod_name}])

    for instance_or_class in (TracingTestClass, TracingTestClass()):
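        # patch_start_tracing_child (tests/conftest.py) replaces child-span
        # creation with a mock so the call count can be asserted directly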
        with patch_start_tracing_child() as fake_start_child:
            assert instance_or_class.static(1) == 1
            assert fake_start_child.call_count == 1


def test_classmethod_tracing(sentry_init):
    test_classmethod_name = "tests.test_basics.TracingTestClass.class_"

    assert (
        ".".join(
            [
                TracingTestClass.class_.__module__,
                TracingTestClass.class_.__qualname__,
            ]
        )
        == test_classmethod_name
    ), "The test class method was moved or renamed. Please update the name accordingly"

    sentry_init(functions_to_trace=[{"qualified_name": test_classmethod_name}])

    for instance_or_class in (TracingTestClass, TracingTestClass()):
        with patch_start_tracing_child() as fake_start_child:
            assert instance_or_class.class_(1) == (TracingTestClass, 1)
            assert fake_start_child.call_count == 1


def test_last_event_id(sentry_init):
    sentry_init(enable_tracing=True)

    assert last_event_id() is None

    capture_exception(Exception("test"))

    assert last_event_id() is not None


def test_last_event_id_transaction(sentry_init):
    sentry_init(enable_tracing=True)

    assert last_event_id() is None

    with start_transaction(name="test"):
        pass

    assert last_event_id() is None, "Transaction should not set last_event_id"


def test_last_event_id_scope(sentry_init):
    sentry_init(enable_tracing=True)

    # Should not crash
    with isolation_scope() as scope:
        assert scope.last_event_id() is None


def test_hub_constructor_deprecation_warning():
    with pytest.warns(sentry_sdk.hub.SentryHubDeprecationWarning):
        Hub()


def test_hub_current_deprecation_warning():
    with pytest.warns(sentry_sdk.hub.SentryHubDeprecationWarning) as warning_records:
        Hub.current

    # Make sure we only issue one deprecation warning
    assert len(warning_records) == 1


def test_hub_main_deprecation_warnings():
    with pytest.warns(sentry_sdk.hub.SentryHubDeprecationWarning):
        Hub.main


@pytest.mark.skipif(sys.version_info < (3, 11), reason="add_note() not supported")
def test_notes(sentry_init, capture_events):
    sentry_init()
    events = capture_events()
    try:
        e = ValueError("aha!")
        e.add_note("Test 123")
        e.add_note("another note")
        raise e
    except Exception:
        capture_exception()

    (event,) = events

    assert event["exception"]["values"][0]["value"] == "aha!\nTest 123\nanother note"


@pytest.mark.skipif(sys.version_info < (3, 11), reason="add_note() not supported")
def test_notes_safe_str(sentry_init, capture_events):
    class Note2:
        def __repr__(self):
            raise TypeError

        def __str__(self):
            raise TypeError

    sentry_init()
    events = capture_events()
    try:
        e = ValueError("aha!")
        e.add_note("note 1")
        e.__notes__.append(Note2())  # type: ignore
        e.add_note("note 3")
        e.__notes__.append(2)  # type: ignore
        raise e
    except Exception:
        capture_exception()

    (event,) = events

    assert event["exception"]["values"][0]["value"] == "aha!\nnote 1\nnote 3"
sentry-python-2.18.0/tests/test_client.py000066400000000000000000001265271471214654000205170ustar00rootroot00000000000000import os
import json
import subprocess
import sys
import time
from collections import Counter, defaultdict
from collections.abc import Mapping
from textwrap import dedent
from unittest import mock

import pytest

import sentry_sdk
from sentry_sdk import (
    Hub,
    Client,
    add_breadcrumb,
    configure_scope,
    capture_message,
    capture_exception,
    capture_event,
    set_tag,
)
from sentry_sdk.spotlight import DEFAULT_SPOTLIGHT_URL
from sentry_sdk.utils import capture_internal_exception
from sentry_sdk.integrations.executing import ExecutingIntegration
from sentry_sdk.transport import Transport
from sentry_sdk.serializer import MAX_DATABAG_BREADTH
from sentry_sdk.consts import DEFAULT_MAX_BREADCRUMBS, DEFAULT_MAX_VALUE_LENGTH

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from collections.abc import Callable
    from typing import Any, Optional, Union
    from sentry_sdk._types import Event


maximum_python_312 = pytest.mark.skipif(
    sys.version_info > (3, 12),
    reason="Since Python 3.13, `FrameLocalsProxy` skips items of `locals()` that have non-`str` keys; this is a CPython implementation detail: https://github.com/python/cpython/blame/7b413952e817ae87bfda2ac85dd84d30a6ce743b/Objects/frameobject.c#L148",
)


class EnvelopeCapturedError(Exception):
    pass


class _TestTransport(Transport):
    def capture_envelope(self, envelope):
        raise EnvelopeCapturedError(envelope)


def test_transport_option(monkeypatch):
    if "SENTRY_DSN" in os.environ:
        monkeypatch.delenv("SENTRY_DSN")

    dsn = "https://foo@sentry.io/123"
    dsn2 = "https://bar@sentry.io/124"
    assert str(Client(dsn=dsn).dsn) == dsn
    assert Client().dsn is None

    monkeypatch.setenv("SENTRY_DSN", dsn)
    transport = _TestTransport({"dsn": dsn2})
    assert str(transport.parsed_dsn) == dsn2
    assert str(Client(transport=transport).dsn) == dsn


@pytest.mark.parametrize(
    "testcase",
    [
        {
            "dsn": "http://foo@sentry.io/123",
            "env_http_proxy": None,
            "env_https_proxy": None,
            "arg_http_proxy": "http://localhost/123",
            "arg_https_proxy": None,
            "expected_proxy_scheme": "http",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": None,
            "env_https_proxy": None,
            "arg_http_proxy": "https://localhost/123",
            "arg_https_proxy": None,
            "expected_proxy_scheme": "https",
        },
        {
            "dsn": "http://foo@sentry.io/123",
            "env_http_proxy": None,
            "env_https_proxy": None,
            "arg_http_proxy": "http://localhost/123",
            "arg_https_proxy": "https://localhost/123",
            "expected_proxy_scheme": "http",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": None,
            "env_https_proxy": None,
            "arg_http_proxy": "http://localhost/123",
            "arg_https_proxy": "https://localhost/123",
            "expected_proxy_scheme": "https",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": None,
            "env_https_proxy": None,
            "arg_http_proxy": "http://localhost/123",
            "arg_https_proxy": None,
            "expected_proxy_scheme": "http",
        },
        {
            "dsn": "http://foo@sentry.io/123",
            "env_http_proxy": None,
            "env_https_proxy": None,
            "arg_http_proxy": None,
            "arg_https_proxy": None,
            "expected_proxy_scheme": None,
        },
        {
            "dsn": "http://foo@sentry.io/123",
            "env_http_proxy": "http://localhost/123",
            "env_https_proxy": None,
            "arg_http_proxy": None,
            "arg_https_proxy": None,
            "expected_proxy_scheme": "http",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": None,
            "env_https_proxy": "https://localhost/123",
            "arg_http_proxy": None,
            "arg_https_proxy": None,
            "expected_proxy_scheme": "https",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": "http://localhost/123",
            "env_https_proxy": None,
            "arg_http_proxy": None,
            "arg_https_proxy": None,
            "expected_proxy_scheme": "http",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": "http://localhost/123",
            "env_https_proxy": "https://localhost/123",
            "arg_http_proxy": "",
            "arg_https_proxy": "",
            "expected_proxy_scheme": None,
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": "http://localhost/123",
            "env_https_proxy": "https://localhost/123",
            "arg_http_proxy": None,
            "arg_https_proxy": None,
            "expected_proxy_scheme": "https",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": "http://localhost/123",
            "env_https_proxy": None,
            "arg_http_proxy": None,
            "arg_https_proxy": None,
            "expected_proxy_scheme": "http",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": "http://localhost/123",
            "env_https_proxy": "https://localhost/123",
            "arg_http_proxy": None,
            "arg_https_proxy": "",
            "expected_proxy_scheme": "http",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": "http://localhost/123",
            "env_https_proxy": "https://localhost/123",
            "arg_http_proxy": "",
            "arg_https_proxy": None,
            "expected_proxy_scheme": "https",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": None,
            "env_https_proxy": "https://localhost/123",
            "arg_http_proxy": None,
            "arg_https_proxy": "",
            "expected_proxy_scheme": None,
        },
        {
            "dsn": "http://foo@sentry.io/123",
            "env_http_proxy": "http://localhost/123",
            "env_https_proxy": "https://localhost/123",
            "arg_http_proxy": None,
            "arg_https_proxy": None,
            "expected_proxy_scheme": "http",
        },
        # NO_PROXY testcases
        {
            "dsn": "http://foo@sentry.io/123",
            "env_http_proxy": "http://localhost/123",
            "env_https_proxy": None,
            "env_no_proxy": "sentry.io,example.com",
            "arg_http_proxy": None,
            "arg_https_proxy": None,
            "expected_proxy_scheme": None,
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": None,
            "env_https_proxy": "https://localhost/123",
            "env_no_proxy": "example.com,sentry.io",
            "arg_http_proxy": None,
            "arg_https_proxy": None,
            "expected_proxy_scheme": None,
        },
        {
            "dsn": "http://foo@sentry.io/123",
            "env_http_proxy": None,
            "env_https_proxy": None,
            "env_no_proxy": "sentry.io,example.com",
            "arg_http_proxy": "http://localhost/123",
            "arg_https_proxy": None,
            "expected_proxy_scheme": "http",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": None,
            "env_https_proxy": None,
            "env_no_proxy": "sentry.io,example.com",
            "arg_http_proxy": None,
            "arg_https_proxy": "https://localhost/123",
            "expected_proxy_scheme": "https",
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "env_http_proxy": None,
            "env_https_proxy": None,
            "env_no_proxy": "sentry.io,example.com",
            "arg_http_proxy": None,
            "arg_https_proxy": "https://localhost/123",
            "expected_proxy_scheme": "https",
            "arg_proxy_headers": {"Test-Header": "foo-bar"},
        },
    ],
)
@pytest.mark.parametrize(
    "http2", [True, False] if sys.version_info >= (3, 8) else [False]
)
def test_proxy(monkeypatch, testcase, http2):
    if testcase["env_http_proxy"] is not None:
        monkeypatch.setenv("HTTP_PROXY", testcase["env_http_proxy"])
    if testcase["env_https_proxy"] is not None:
        monkeypatch.setenv("HTTPS_PROXY", testcase["env_https_proxy"])
    if testcase.get("env_no_proxy") is not None:
        monkeypatch.setenv("NO_PROXY", testcase["env_no_proxy"])

    kwargs = {}

    if http2:
        kwargs["_experiments"] = {"transport_http2": True}

    if testcase["arg_http_proxy"] is not None:
        kwargs["http_proxy"] = testcase["arg_http_proxy"]
    if testcase["arg_https_proxy"] is not None:
        kwargs["https_proxy"] = testcase["arg_https_proxy"]
    if testcase.get("arg_proxy_headers") is not None:
        kwargs["proxy_headers"] = testcase["arg_proxy_headers"]

    client = Client(testcase["dsn"], **kwargs)

    proxy = getattr(
        client.transport._pool,
        "proxy",
        getattr(client.transport._pool, "_proxy_url", None),
    )
    if testcase["expected_proxy_scheme"] is None:
        assert proxy is None
    else:
        scheme = (
            proxy.scheme.decode("ascii")
            if isinstance(proxy.scheme, bytes)
            else proxy.scheme
        )
        assert scheme == testcase["expected_proxy_scheme"]

        if testcase.get("arg_proxy_headers") is not None:
            proxy_headers = (
                dict(
                    (k.decode("ascii"), v.decode("ascii"))
                    for k, v in client.transport._pool._proxy_headers
                )
                if http2
                else client.transport._pool.proxy_headers
            )
            assert proxy_headers == testcase["arg_proxy_headers"]
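

# The matrix above pins down the proxy resolution order: an explicit
# http_proxy/https_proxy argument always wins over the HTTP_PROXY/HTTPS_PROXY
# environment variables, an https proxy is preferred for https DSNs, an empty
# string explicitly disables proxying, and NO_PROXY only exempts hosts from
# environment-provided proxies, not from explicit arguments. A minimal usage
# sketch (illustrative values only; never executed by the suite):
def _proxy_usage_sketch():
    sentry_sdk.init(
        "https://foo@sentry.io/123",
        # Overrides HTTPS_PROXY from the environment; "" would disable it.
        https_proxy="https://proxy.internal:8080",
    )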


@pytest.mark.parametrize(
    "testcase",
    [
        {
            "dsn": "https://foo@sentry.io/123",
            "arg_http_proxy": "http://localhost/123",
            "arg_https_proxy": None,
            "should_be_socks_proxy": False,
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "arg_http_proxy": "socks4a://localhost/123",
            "arg_https_proxy": None,
            "should_be_socks_proxy": True,
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "arg_http_proxy": "socks4://localhost/123",
            "arg_https_proxy": None,
            "should_be_socks_proxy": True,
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "arg_http_proxy": "socks5h://localhost/123",
            "arg_https_proxy": None,
            "should_be_socks_proxy": True,
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "arg_http_proxy": "socks5://localhost/123",
            "arg_https_proxy": None,
            "should_be_socks_proxy": True,
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "arg_http_proxy": None,
            "arg_https_proxy": "socks4a://localhost/123",
            "should_be_socks_proxy": True,
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "arg_http_proxy": None,
            "arg_https_proxy": "socks4://localhost/123",
            "should_be_socks_proxy": True,
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "arg_http_proxy": None,
            "arg_https_proxy": "socks5h://localhost/123",
            "should_be_socks_proxy": True,
        },
        {
            "dsn": "https://foo@sentry.io/123",
            "arg_http_proxy": None,
            "arg_https_proxy": "socks5://localhost/123",
            "should_be_socks_proxy": True,
        },
    ],
)
@pytest.mark.parametrize(
    "http2", [True, False] if sys.version_info >= (3, 8) else [False]
)
def test_socks_proxy(testcase, http2):
    kwargs = {}

    if http2:
        kwargs["_experiments"] = {"transport_http2": True}

    if testcase["arg_http_proxy"] is not None:
        kwargs["http_proxy"] = testcase["arg_http_proxy"]
    if testcase["arg_https_proxy"] is not None:
        kwargs["https_proxy"] = testcase["arg_https_proxy"]

    client = Client(testcase["dsn"], **kwargs)
    assert ("socks" in str(type(client.transport._pool)).lower()) == testcase[
        "should_be_socks_proxy"
    ], (
        f"Expected {kwargs} to result in SOCKS == {testcase['should_be_socks_proxy']}"
        f"but got {str(type(client.transport._pool))}"
    )
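

# SOCKS handling is keyed purely off the proxy URL scheme (socks4, socks4a,
# socks5, socks5h). Note: actually connecting through SOCKS needs an optional
# dependency of the underlying HTTP library (PySocks for urllib3, socksio for
# httpcore); that is an assumption about the transport internals and is not
# asserted above.
def _socks_proxy_usage_sketch():
    # Illustrative values only; never executed by the suite.
    sentry_sdk.init(
        "https://foo@sentry.io/123",
        https_proxy="socks5://proxy.internal:1080",
    )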


def test_simple_transport(sentry_init):
    events = []
    sentry_init(transport=events.append)
    capture_message("Hello World!")
    assert events[0]["message"] == "Hello World!"


def test_ignore_errors(sentry_init, capture_events):
    sentry_init(ignore_errors=[ZeroDivisionError])
    events = capture_events()

    class MyDivisionError(ZeroDivisionError):
        pass

    def e(exc):
        try:
            raise exc
        except Exception:
            capture_exception()

    e(ZeroDivisionError())
    e(MyDivisionError())
    e(ValueError())

    assert len(events) == 1
    assert events[0]["exception"]["values"][0]["type"] == "ValueError"


def test_include_local_variables_enabled(sentry_init, capture_events):
    sentry_init(include_local_variables=True)
    events = capture_events()
    try:
        1 / 0
    except Exception:
        capture_exception()

    (event,) = events

    assert all(
        frame["vars"]
        for frame in event["exception"]["values"][0]["stacktrace"]["frames"]
    )


def test_include_local_variables_disabled(sentry_init, capture_events):
    sentry_init(include_local_variables=False)
    events = capture_events()
    try:
        1 / 0
    except Exception:
        capture_exception()

    (event,) = events

    assert all(
        "vars" not in frame
        for frame in event["exception"]["values"][0]["stacktrace"]["frames"]
    )


def test_include_source_context_enabled(sentry_init, capture_events):
    sentry_init(include_source_context=True)
    events = capture_events()
    try:
        1 / 0
    except Exception:
        capture_exception()

    (event,) = events

    frame = event["exception"]["values"][0]["stacktrace"]["frames"][0]
    assert "post_context" in frame
    assert "pre_context" in frame
    assert "context_line" in frame


def test_include_source_context_disabled(sentry_init, capture_events):
    sentry_init(include_source_context=False)
    events = capture_events()
    try:
        1 / 0
    except Exception:
        capture_exception()

    (event,) = events

    frame = event["exception"]["values"][0]["stacktrace"]["frames"][0]
    assert "post_context" not in frame
    assert "pre_context" not in frame
    assert "context_line" not in frame


@pytest.mark.parametrize("integrations", [[], [ExecutingIntegration()]])
def test_function_names(sentry_init, capture_events, integrations):
    sentry_init(integrations=integrations)
    events = capture_events()

    def foo():
        try:
            bar()
        except Exception:
            capture_exception()

    def bar():
        1 / 0

    foo()

    (event,) = events
    (thread,) = event["exception"]["values"]
    functions = [x["function"] for x in thread["stacktrace"]["frames"]]

    if integrations:
        assert functions == [
            "test_function_names.<locals>.foo",
            "test_function_names.<locals>.bar",
        ]
    else:
        assert functions == ["foo", "bar"]


def test_attach_stacktrace_enabled(sentry_init, capture_events):
    sentry_init(attach_stacktrace=True)
    events = capture_events()

    def foo():
        bar()

    def bar():
        capture_message("HI")

    foo()

    (event,) = events
    (thread,) = event["threads"]["values"]
    functions = [x["function"] for x in thread["stacktrace"]["frames"]]

    assert functions[-2:] == ["foo", "bar"]


def test_attach_stacktrace_enabled_no_locals(sentry_init, capture_events):
    sentry_init(attach_stacktrace=True, include_local_variables=False)
    events = capture_events()

    def foo():
        bar()

    def bar():
        capture_message("HI")

    foo()

    (event,) = events
    (thread,) = event["threads"]["values"]
    local_vars = [x.get("vars") for x in thread["stacktrace"]["frames"]]
    assert local_vars[-2:] == [None, None]


def test_attach_stacktrace_in_app(sentry_init, capture_events):
    sentry_init(attach_stacktrace=True, in_app_exclude=["_pytest"])
    events = capture_events()

    capture_message("hi")

    (event,) = events
    (thread,) = event["threads"]["values"]
    frames = thread["stacktrace"]["frames"]
    pytest_frames = [f for f in frames if f["module"].startswith("_pytest")]
    assert pytest_frames
    assert all(f["in_app"] is False for f in pytest_frames)


def test_attach_stacktrace_disabled(sentry_init, capture_events):
    sentry_init(attach_stacktrace=False)
    events = capture_events()
    capture_message("HI")

    (event,) = events
    assert "threads" not in event


def test_capture_event_works(sentry_init):
    sentry_init(transport=_TestTransport())
    pytest.raises(EnvelopeCapturedError, lambda: capture_event({}))
    pytest.raises(EnvelopeCapturedError, lambda: capture_event({}))


@pytest.mark.parametrize("num_messages", [10, 20])
@pytest.mark.parametrize(
    "http2", [True, False] if sys.version_info >= (3, 8) else [False]
)
def test_atexit(tmpdir, monkeypatch, num_messages, http2):
    if http2:
        options = '_experiments={"transport_http2": True}'
        transport = "Http2Transport"
    else:
        options = ""
        transport = "HttpTransport"

    app = tmpdir.join("app.py")
    app.write(
        dedent(
            """
    import time
    from sentry_sdk import init, transport, capture_message

    def capture_envelope(self, envelope):
        time.sleep(0.1)
        event = envelope.get_event() or dict()
        message = event.get("message", "")
        print(message)

    transport.{transport}.capture_envelope = capture_envelope
    init("http://foobar@localhost/123", shutdown_timeout={num_messages}, {options})

    for _ in range({num_messages}):
        capture_message("HI")
    """.format(
                transport=transport, options=options, num_messages=num_messages
            )
        )
    )

    start = time.time()
    output = subprocess.check_output([sys.executable, str(app)])
    end = time.time()

    # Each message takes at least 0.1 seconds to process
    assert int(end - start) >= num_messages / 10

    assert output.count(b"HI") == num_messages
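

# On interpreter exit the SDK's atexit hook flushes pending envelopes for at
# most shutdown_timeout seconds; with each envelope taking 0.1s above, the
# subprocess cannot finish before num_messages / 10 seconds have passed.
def _shutdown_timeout_usage_sketch():
    # Illustrative only: wait up to 5 seconds for pending events on shutdown.
    sentry_sdk.init("https://foo@sentry.io/123", shutdown_timeout=5)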


def test_configure_scope_available(
    sentry_init, request, monkeypatch, suppress_deprecation_warnings
):
    """
    Test that scope is configured if client is configured

    This test can be removed once configure_scope and the Hub are removed.
    """
    sentry_init()

    with configure_scope() as scope:
        assert scope is Hub.current.scope
        scope.set_tag("foo", "bar")

    calls = []

    def callback(scope):
        calls.append(scope)
        scope.set_tag("foo", "bar")

    assert configure_scope(callback) is None
    assert len(calls) == 1
    assert calls[0] is Hub.current.scope


@pytest.mark.tests_internal_exceptions
def test_client_debug_option_enabled(sentry_init, caplog):
    sentry_init(debug=True)

    capture_internal_exception((ValueError, ValueError("OK"), None))
    assert "OK" in caplog.text


@pytest.mark.tests_internal_exceptions
@pytest.mark.parametrize("with_client", (True, False))
def test_client_debug_option_disabled(with_client, sentry_init, caplog):
    if with_client:
        sentry_init()

    capture_internal_exception((ValueError, ValueError("OK"), None))
    assert "OK" not in caplog.text


@pytest.mark.skip(
    reason="New behavior in SDK 2.0: You have a scope before init and add data to it."
)
def test_scope_initialized_before_client(sentry_init, capture_events):
    """
    This is a consequence of how configure_scope() works. We must
    make `configure_scope()` a noop if no client is configured. Even
    if the user later configures a client: We don't know that.
    """
    with configure_scope() as scope:
        scope.set_tag("foo", 42)

    sentry_init()

    events = capture_events()
    capture_message("hi")
    (event,) = events

    assert "tags" not in event


def test_weird_chars(sentry_init, capture_events):
    sentry_init()
    events = capture_events()
    capture_message("föö".encode("latin1"))
    (event,) = events
    assert json.loads(json.dumps(event)) == event


def test_nan(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    try:
        # should_repr_strings=False
        set_tag("mynan", float("nan"))

        # should_repr_strings=True
        nan = float("nan")  # noqa
        1 / 0
    except Exception:
        capture_exception()

    (event,) = events
    frames = event["exception"]["values"][0]["stacktrace"]["frames"]
    (frame,) = frames
    assert frame["vars"]["nan"] == "nan"
    assert event["tags"]["mynan"] == "nan"


def test_cyclic_frame_vars(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    try:
        a = {}
        a["a"] = a
        1 / 0
    except Exception:
        capture_exception()

    (event,) = events
    assert event["exception"]["values"][0]["stacktrace"]["frames"][0]["vars"]["a"] == {
        "a": ""
    }


def test_cyclic_data(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    data = {}
    data["is_cyclic"] = data

    other_data = ""
    data["not_cyclic"] = other_data
    data["not_cyclic2"] = other_data
    sentry_sdk.get_isolation_scope().set_extra("foo", data)

    capture_message("hi")
    (event,) = events

    data = event["extra"]["foo"]
    assert data == {"not_cyclic2": "", "not_cyclic": "", "is_cyclic": ""}


def test_databag_depth_stripping(sentry_init, capture_events, benchmark):
    sentry_init()
    events = capture_events()

    value = ["a"]
    for _ in range(100000):
        value = [value]

    @benchmark
    def inner():
        del events[:]
        try:
            a = value  # noqa
            1 / 0
        except Exception:
            capture_exception()

        (event,) = events

        assert len(json.dumps(event)) < 10000


def test_databag_string_stripping(sentry_init, capture_events, benchmark):
    sentry_init()
    events = capture_events()

    @benchmark
    def inner():
        del events[:]
        try:
            a = "A" * 1000000  # noqa
            1 / 0
        except Exception:
            capture_exception()

        (event,) = events

        assert len(json.dumps(event)) < 10000


def test_databag_breadth_stripping(sentry_init, capture_events, benchmark):
    sentry_init()
    events = capture_events()

    @benchmark
    def inner():
        del events[:]
        try:
            a = ["a"] * 1000000  # noqa
            1 / 0
        except Exception:
            capture_exception()

        (event,) = events

        assert (
            len(event["exception"]["values"][0]["stacktrace"]["frames"][0]["vars"]["a"])
            == MAX_DATABAG_BREADTH
        )
        assert len(json.dumps(event)) < 10000


def test_chained_exceptions(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    try:
        try:
            raise ValueError()
        except Exception:
            1 / 0
    except Exception:
        capture_exception()

    (event,) = events

    e1, e2 = event["exception"]["values"]

    # This is the order all other SDKs send chained exceptions in. Including
    # Raven-Python.

    assert e1["type"] == "ValueError"
    assert e2["type"] == "ZeroDivisionError"


@pytest.mark.tests_internal_exceptions
def test_broken_mapping(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    class C(Mapping):
        def broken(self, *args, **kwargs):
            raise Exception("broken")

        __getitem__ = broken
        __setitem__ = broken
        __delitem__ = broken
        __iter__ = broken
        __len__ = broken

        def __repr__(self):
            return "broken"

    try:
        a = C()  # noqa
        1 / 0
    except Exception:
        capture_exception()

    (event,) = events
    assert (
        event["exception"]["values"][0]["stacktrace"]["frames"][0]["vars"]["a"]
        == "<failed to serialize, use init(debug=True) to see error logs>"
    )


def test_mapping_sends_exception(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    class C(Mapping):
        def __iter__(self):
            try:
                1 / 0
            except ZeroDivisionError:
                capture_exception()
            yield "hi"

        def __len__(self):
            """List length"""
            return 1

        def __getitem__(self, ii):
            """Get a list item"""
            if ii == "hi":
                return "hi"

            raise KeyError()

    try:
        a = C()  # noqa
        1 / 0
    except Exception:
        capture_exception()

    (event,) = events

    assert event["exception"]["values"][0]["stacktrace"]["frames"][0]["vars"]["a"] == {
        "hi": "'hi'"
    }


def test_object_sends_exception(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    class C:
        def __repr__(self):
            try:
                1 / 0
            except ZeroDivisionError:
                capture_exception()
            return "hi, i am a repr"

    try:
        a = C()  # noqa
        1 / 0
    except Exception:
        capture_exception()

    (event,) = events

    assert (
        event["exception"]["values"][0]["stacktrace"]["frames"][0]["vars"]["a"]
        == "hi, i am a repr"
    )


def test_errno_errors(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    class FooError(Exception):
        errno = 69

    capture_exception(FooError())

    (event,) = events

    (exception,) = event["exception"]["values"]
    assert exception["mechanism"]["meta"]["errno"]["number"] == 69


@maximum_python_312
def test_non_string_variables(sentry_init, capture_events):
    """There is some extremely terrible code in the wild that
    inserts non-strings as variable names into `locals()`."""

    sentry_init()
    events = capture_events()

    try:
        locals()[42] = True
        1 / 0
    except ZeroDivisionError:
        capture_exception()

    (event,) = events

    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    (frame,) = exception["stacktrace"]["frames"]
    assert frame["vars"]["42"] == "True"


def test_dict_changed_during_iteration(sentry_init, capture_events):
    """
    Some versions of Bottle modify the WSGI environment inside of this __repr__
    impl: https://github.com/bottlepy/bottle/blob/0.12.16/bottle.py#L1386

    See https://github.com/getsentry/sentry-python/pull/298 for discussion
    """
    sentry_init(send_default_pii=True)
    events = capture_events()

    class TooSmartClass:
        def __init__(self, environ):
            self.environ = environ

        def __repr__(self):
            if "my_representation" in self.environ:
                return self.environ["my_representation"]

            self.environ["my_representation"] = ""
            return self.environ["my_representation"]

    try:
        environ = {}
        environ["a"] = TooSmartClass(environ)
        1 / 0
    except ZeroDivisionError:
        capture_exception()

    (event,) = events
    (exception,) = event["exception"]["values"]
    (frame,) = exception["stacktrace"]["frames"]
    assert frame["vars"]["environ"] == {"a": ""}


def test_custom_repr_on_vars(sentry_init, capture_events):
    class Foo:
        pass

    class Fail:
        pass

    def custom_repr(value):
        if isinstance(value, Foo):
            return "custom repr"
        elif isinstance(value, Fail):
            raise ValueError("oops")
        else:
            return None

    sentry_init(custom_repr=custom_repr)
    events = capture_events()

    try:
        my_vars = {"foo": Foo(), "fail": Fail(), "normal": 42}
        1 / 0
    except ZeroDivisionError:
        capture_exception()

    (event,) = events
    (exception,) = event["exception"]["values"]
    (frame,) = exception["stacktrace"]["frames"]
    my_vars = frame["vars"]["my_vars"]
    assert my_vars["foo"] == "custom repr"
    assert my_vars["normal"] == "42"
    assert "Fail object" in my_vars["fail"]


@pytest.mark.parametrize(
    "dsn",
    [
        "http://894b7d594095440f8dfea9b300e6f572@localhost:8000/2",
        "http://894b7d594095440f8dfea9b300e6f572@localhost:8000/2",
    ],
)
def test_init_string_types(dsn, sentry_init):
    # Allow unicode strings on Python 3 and both on Python 2 (due to
    # unicode_literals)
    #
    # Supporting bytes on Python 3 is not really wrong but probably would be
    # extra code
    sentry_init(dsn)
    assert (
        sentry_sdk.get_client().dsn
        == "http://894b7d594095440f8dfea9b300e6f572@localhost:8000/2"
    )


@pytest.mark.parametrize(
    "sdk_options, expected_breadcrumbs",
    [({}, DEFAULT_MAX_BREADCRUMBS), ({"max_breadcrumbs": 50}, 50)],
)
def test_max_breadcrumbs_option(
    sentry_init, capture_events, sdk_options, expected_breadcrumbs
):
    sentry_init(**sdk_options)
    events = capture_events()

    for _ in range(1231):
        add_breadcrumb({"type": "sourdough"})

    capture_message("dogs are great")

    assert len(events[0]["breadcrumbs"]["values"]) == expected_breadcrumbs


def test_multiple_positional_args(sentry_init):
    with pytest.raises(TypeError) as exinfo:
        sentry_init(1, None)
    assert "Only single positional argument is expected" in str(exinfo.value)


@pytest.mark.parametrize(
    "sdk_options, expected_data_length",
    [
        ({}, DEFAULT_MAX_VALUE_LENGTH),
        ({"max_value_length": 1800}, 1800),
    ],
)
def test_max_value_length_option(
    sentry_init, capture_events, sdk_options, expected_data_length
):
    sentry_init(**sdk_options)
    events = capture_events()

    capture_message("a" * 2000)

    assert len(events[0]["message"]) == expected_data_length


@pytest.mark.parametrize(
    "client_option,env_var_value,debug_output_expected",
    [
        (None, "", False),
        (None, "t", True),
        (None, "1", True),
        (None, "True", True),
        (None, "true", True),
        (None, "f", False),
        (None, "0", False),
        (None, "False", False),
        (None, "false", False),
        (None, "xxx", False),
        (True, "", True),
        (True, "t", True),
        (True, "1", True),
        (True, "True", True),
        (True, "true", True),
        (True, "f", True),
        (True, "0", True),
        (True, "False", True),
        (True, "false", True),
        (True, "xxx", True),
        (False, "", False),
        (False, "t", False),
        (False, "1", False),
        (False, "True", False),
        (False, "true", False),
        (False, "f", False),
        (False, "0", False),
        (False, "False", False),
        (False, "false", False),
        (False, "xxx", False),
    ],
)
@pytest.mark.tests_internal_exceptions
def test_debug_option(
    sentry_init,
    monkeypatch,
    caplog,
    client_option,
    env_var_value,
    debug_output_expected,
):
    monkeypatch.setenv("SENTRY_DEBUG", env_var_value)

    if client_option is None:
        sentry_init()
    else:
        sentry_init(debug=client_option)

    capture_internal_exception((ValueError, ValueError("something is wrong"), None))
    if debug_output_expected:
        assert "something is wrong" in caplog.text
    else:
        assert "something is wrong" not in caplog.text


@pytest.mark.parametrize(
    "client_option,env_var_value,spotlight_url_expected",
    [
        (None, None, None),
        (None, "", None),
        (None, "F", None),
        (False, None, None),
        (False, "", None),
        (False, "t", None),
        (None, "t", DEFAULT_SPOTLIGHT_URL),
        (None, "1", DEFAULT_SPOTLIGHT_URL),
        (True, None, DEFAULT_SPOTLIGHT_URL),
        (True, "http://localhost:8080/slurp", DEFAULT_SPOTLIGHT_URL),
        ("http://localhost:8080/slurp", "f", "http://localhost:8080/slurp"),
        (None, "http://localhost:8080/slurp", "http://localhost:8080/slurp"),
    ],
)
def test_spotlight_option(
    sentry_init,
    monkeypatch,
    client_option,
    env_var_value,
    spotlight_url_expected,
):
    if env_var_value is None:
        monkeypatch.delenv("SENTRY_SPOTLIGHT", raising=False)
    else:
        monkeypatch.setenv("SENTRY_SPOTLIGHT", env_var_value)

    if client_option is None:
        sentry_init()
    else:
        sentry_init(spotlight=client_option)

    client = sentry_sdk.get_client()
    url = client.spotlight.url if client.spotlight else None
    assert (
        url == spotlight_url_expected
    ), f"With config {client_option} and env {env_var_value}"


class IssuesSamplerTestConfig:
    def __init__(
        self,
        expected_events,
        sampler_function=None,
        sample_rate=None,
        exception_to_raise=Exception,
    ):
        # type: (int, Optional[Callable[[Event], Union[float, bool]]], Optional[float], type[Exception]) -> None
        self.sampler_function_mock = (
            None
            if sampler_function is None
            else mock.MagicMock(side_effect=sampler_function)
        )
        self.expected_events = expected_events
        self.sample_rate = sample_rate
        self.exception_to_raise = exception_to_raise

    def init_sdk(self, sentry_init):
        # type: (Callable[[*Any], None]) -> None
        sentry_init(
            error_sampler=self.sampler_function_mock, sample_rate=self.sample_rate
        )

    def raise_exception(self):
        # type: () -> None
        raise self.exception_to_raise()


@mock.patch("sentry_sdk.client.random.random", return_value=0.618)
@pytest.mark.parametrize(
    "test_config",
    (
        # Baseline test with error_sampler only, both floats and bools
        IssuesSamplerTestConfig(sampler_function=lambda *_: 1.0, expected_events=1),
        IssuesSamplerTestConfig(sampler_function=lambda *_: 0.7, expected_events=1),
        IssuesSamplerTestConfig(sampler_function=lambda *_: 0.6, expected_events=0),
        IssuesSamplerTestConfig(sampler_function=lambda *_: 0.0, expected_events=0),
        IssuesSamplerTestConfig(sampler_function=lambda *_: True, expected_events=1),
        IssuesSamplerTestConfig(sampler_function=lambda *_: False, expected_events=0),
        # Baseline test with sample_rate only
        IssuesSamplerTestConfig(sample_rate=1.0, expected_events=1),
        IssuesSamplerTestConfig(sample_rate=0.7, expected_events=1),
        IssuesSamplerTestConfig(sample_rate=0.6, expected_events=0),
        IssuesSamplerTestConfig(sample_rate=0.0, expected_events=0),
        # error_sampler takes precedence over sample_rate
        IssuesSamplerTestConfig(
            sampler_function=lambda *_: 1.0, sample_rate=0.0, expected_events=1
        ),
        IssuesSamplerTestConfig(
            sampler_function=lambda *_: 0.0, sample_rate=1.0, expected_events=0
        ),
        # Different sample rates based on exception, retrieved both from event and hint
        IssuesSamplerTestConfig(
            sampler_function=lambda event, _: {
                "ZeroDivisionError": 1.0,
                "AttributeError": 0.0,
            }[event["exception"]["values"][0]["type"]],
            exception_to_raise=ZeroDivisionError,
            expected_events=1,
        ),
        IssuesSamplerTestConfig(
            sampler_function=lambda event, _: {
                "ZeroDivisionError": 1.0,
                "AttributeError": 0.0,
            }[event["exception"]["values"][0]["type"]],
            exception_to_raise=AttributeError,
            expected_events=0,
        ),
        IssuesSamplerTestConfig(
            sampler_function=lambda _, hint: {
                ZeroDivisionError: 1.0,
                AttributeError: 0.0,
            }[hint["exc_info"][0]],
            exception_to_raise=ZeroDivisionError,
            expected_events=1,
        ),
        IssuesSamplerTestConfig(
            sampler_function=lambda _, hint: {
                ZeroDivisionError: 1.0,
                AttributeError: 0.0,
            }[hint["exc_info"][0]],
            exception_to_raise=AttributeError,
            expected_events=0,
        ),
        # If sampler returns invalid value, we should still send the event
        IssuesSamplerTestConfig(
            sampler_function=lambda *_: "This is an invalid return value for the sampler",
            expected_events=1,
        ),
    ),
)
def test_error_sampler(_, sentry_init, capture_events, test_config):
    test_config.init_sdk(sentry_init)

    events = capture_events()

    try:
        test_config.raise_exception()
    except Exception:
        capture_exception()

    assert len(events) == test_config.expected_events

    if test_config.sampler_function_mock is not None:
        assert test_config.sampler_function_mock.call_count == 1

        # Ensure two arguments (the event and hint) were passed to the sampler function
        assert len(test_config.sampler_function_mock.call_args[0]) == 2
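

# The error_sampler contract exercised above: it is called once per error with
# (event, hint), may return a float rate or a bool, takes precedence over
# sample_rate, and an invalid return value fails open (the event is sent).
def _error_sampler_sketch(event, hint):
    # Illustrative only: sample noisy ZeroDivisionErrors at 10%, keep the rest.
    if hint["exc_info"][0] is ZeroDivisionError:
        return 0.1
    return True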


@pytest.mark.forked
@pytest.mark.parametrize(
    "opt,missing_flags",
    [
        # lazy mode with enable-threads, no warning
        [{"enable-threads": True, "lazy-apps": True}, []],
        [{"enable-threads": "true", "lazy-apps": b"1"}, []],
        # preforking mode with enable-threads and py-call-uwsgi-fork-hooks, no warning
        [{"enable-threads": True, "py-call-uwsgi-fork-hooks": True}, []],
        [{"enable-threads": b"true", "py-call-uwsgi-fork-hooks": b"on"}, []],
        # lazy mode, no enable-threads, warning
        [{"lazy-apps": True}, ["--enable-threads"]],
        [{"enable-threads": b"false", "lazy-apps": True}, ["--enable-threads"]],
        [{"enable-threads": b"0", "lazy": True}, ["--enable-threads"]],
        # preforking mode, no enable-threads or py-call-uwsgi-fork-hooks, warning
        [{}, ["--enable-threads", "--py-call-uwsgi-fork-hooks"]],
        [{"processes": b"2"}, ["--enable-threads", "--py-call-uwsgi-fork-hooks"]],
        [{"enable-threads": True}, ["--py-call-uwsgi-fork-hooks"]],
        [{"enable-threads": b"1"}, ["--py-call-uwsgi-fork-hooks"]],
        [
            {"enable-threads": b"false"},
            ["--enable-threads", "--py-call-uwsgi-fork-hooks"],
        ],
        [{"py-call-uwsgi-fork-hooks": True}, ["--enable-threads"]],
    ],
)
def test_uwsgi_warnings(sentry_init, recwarn, opt, missing_flags):
    uwsgi = mock.MagicMock()
    uwsgi.opt = opt
    with mock.patch.dict("sys.modules", uwsgi=uwsgi):
        sentry_init(profiles_sample_rate=1.0)
        if missing_flags:
            assert len(recwarn) == 1
            record = recwarn.pop()
            for flag in missing_flags:
                assert flag in str(record.message)
        else:
            assert not recwarn


class TestSpanClientReports:
    """
    Tests for client reports related to spans.
    """

    @staticmethod
    def span_dropper(spans_to_drop):
        """
        Returns a function that can be used to drop spans from an event.
        """

        def drop_spans(event, _):
            event["spans"] = event["spans"][spans_to_drop:]
            return event

        return drop_spans

    @staticmethod
    def mock_transaction_event(span_count):
        """
        Returns a mock transaction event with the given number of spans.
        """

        return defaultdict(
            mock.MagicMock,
            type="transaction",
            spans=[mock.MagicMock() for _ in range(span_count)],
        )

    def __init__(self, span_count):
        """Configures a test case with the number of spans dropped and whether the transaction was dropped."""
        self.span_count = span_count
        self.expected_record_lost_event_calls = Counter()
        self.before_send = lambda event, _: event
        self.event_processor = lambda event, _: event

    def _update_resulting_calls(self, reason, drops_transactions=0, drops_spans=0):
        """
        Updates the expected calls with the given resulting calls.
        """
        if drops_transactions > 0:
            self.expected_record_lost_event_calls[
                (reason, "transaction", None, drops_transactions)
            ] += 1

        if drops_spans > 0:
            self.expected_record_lost_event_calls[
                (reason, "span", None, drops_spans)
            ] += 1

    def with_before_send(
        self,
        before_send,
        *,
        drops_transactions=0,
        drops_spans=0,
    ):
        self.before_send = before_send
        self._update_resulting_calls(
            "before_send",
            drops_transactions,
            drops_spans,
        )

        return self

    def with_event_processor(
        self,
        event_processor,
        *,
        drops_transactions=0,
        drops_spans=0,
    ):
        self.event_processor = event_processor
        self._update_resulting_calls(
            "event_processor",
            drops_transactions,
            drops_spans,
        )

        return self

    def run(self, sentry_init, capture_record_lost_event_calls):
        """Runs the test case with the configured parameters."""
        sentry_init(before_send_transaction=self.before_send)
        record_lost_event_calls = capture_record_lost_event_calls()

        with sentry_sdk.isolation_scope() as scope:
            scope.add_event_processor(self.event_processor)
            event = self.mock_transaction_event(self.span_count)
            sentry_sdk.get_client().capture_event(event, scope=scope)

        # We use counters to ensure that the calls are made the expected number of times, disregarding order.
        assert Counter(record_lost_event_calls) == self.expected_record_lost_event_calls
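
    # Accounting note for the cases below: when a whole transaction is
    # dropped, the lost-span count is span_count + 1, because the
    # transaction's root span is counted alongside its child spans
    # (e.g. drops_spans=11 for a 10-span transaction).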


@pytest.mark.parametrize(
    "test_config",
    (
        TestSpanClientReports(span_count=10),  # No spans dropped
        TestSpanClientReports(span_count=0).with_before_send(
            lambda e, _: None,
            drops_transactions=1,
            drops_spans=1,
        ),
        TestSpanClientReports(span_count=10).with_before_send(
            lambda e, _: None,
            drops_transactions=1,
            drops_spans=11,
        ),
        TestSpanClientReports(span_count=10).with_before_send(
            TestSpanClientReports.span_dropper(3),
            drops_spans=3,
        ),
        TestSpanClientReports(span_count=10).with_before_send(
            TestSpanClientReports.span_dropper(10),
            drops_spans=10,
        ),
        TestSpanClientReports(span_count=10).with_event_processor(
            lambda e, _: None,
            drops_transactions=1,
            drops_spans=11,
        ),
        TestSpanClientReports(span_count=10).with_event_processor(
            TestSpanClientReports.span_dropper(3),
            drops_spans=3,
        ),
        TestSpanClientReports(span_count=10).with_event_processor(
            TestSpanClientReports.span_dropper(10),
            drops_spans=10,
        ),
        TestSpanClientReports(span_count=10)
        .with_event_processor(
            TestSpanClientReports.span_dropper(3),
            drops_spans=3,
        )
        .with_before_send(
            TestSpanClientReports.span_dropper(5),
            drops_spans=5,
        ),
        TestSpanClientReports(10)
        .with_event_processor(
            TestSpanClientReports.span_dropper(3),
            drops_spans=3,
        )
        .with_before_send(
            lambda e, _: None,
            drops_transactions=1,
            drops_spans=8,  # 3 of the 11 (incl. transaction) spans already dropped
        ),
    ),
)
def test_dropped_transaction(sentry_init, capture_record_lost_event_calls, test_config):
    test_config.run(sentry_init, capture_record_lost_event_calls)
sentry-python-2.18.0/tests/test_conftest.py
import pytest


@pytest.mark.parametrize(
    "test_string, expected_result",
    [
        # type matches
        ("dogs are great!", True),  # full containment - beginning
        ("go, dogs, go!", True),  # full containment - middle
        ("I like dogs", True),  # full containment - end
        ("dogs", True),  # equality
        ("", False),  # reverse containment
        ("dog", False),  # reverse containment
        ("good dog!", False),  # partial overlap
        ("cats", False),  # no overlap
        # type mismatches
        (1231, False),
        (11.21, False),
        ([], False),
        ({}, False),
        (True, False),
    ],
)
def test_string_containing(
    test_string, expected_result, StringContaining  # noqa: N803
):
    assert (test_string == StringContaining("dogs")) is expected_result
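

# StringContaining, DictionaryContaining and ObjectDescribedBy are matcher
# fixtures from conftest that implement containment via __eq__. A minimal
# sketch of such a matcher, assuming the semantics pinned down by the table
# above (the real fixture may differ in detail):
class _StringContainingSketch:
    def __init__(self, substring):
        self.substring = substring

    def __eq__(self, other):
        # Equal to any string that contains the substring; never equal to
        # non-string values.
        return isinstance(other, str) and self.substring in other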


@pytest.mark.parametrize(
    "test_dict, expected_result",
    [
        # type matches
        ({"dogs": "yes", "cats": "maybe", "spiders": "nope"}, True),  # full containment
        ({"dogs": "yes", "cats": "maybe"}, True),  # equality
        ({}, False),  # reverse containment
        ({"dogs": "yes"}, False),  # reverse containment
        ({"dogs": "yes", "birds": "only outside"}, False),  # partial overlap
        ({"coyotes": "from afar"}, False),  # no overlap
        # type mismatches
        ('{"dogs": "yes", "cats": "maybe"}', False),
        (1231, False),
        (11.21, False),
        ([], False),
        (True, False),
    ],
)
def test_dictionary_containing(
    test_dict, expected_result, DictionaryContaining  # noqa: N803
):
    assert (
        test_dict == DictionaryContaining({"dogs": "yes", "cats": "maybe"})
    ) is expected_result


class Animal:  # noqa: B903
    def __init__(self, name=None, age=None, description=None):
        self.name = name
        self.age = age
        self.description = description


class Dog(Animal):
    pass


class Cat(Animal):
    pass


@pytest.mark.parametrize(
    "test_obj, type_and_attrs_result, type_only_result, attrs_only_result",
    [
        # type matches
        (Dog("Maisey", 7, "silly"), True, True, True),  # full attr containment
        (Dog("Maisey", 7), True, True, True),  # type and attr equality
        (Dog(), False, True, False),  # reverse attr containment
        (Dog("Maisey"), False, True, False),  # reverse attr containment
        (Dog("Charlie", 7, "goofy"), False, True, False),  # partial attr overlap
        (Dog("Bodhi", 6, "floppy"), False, True, False),  # no attr overlap
        # type mismatches
        (Cat("Maisey", 7), False, False, True),  # attr equality
        (Cat("Piper", 1, "doglike"), False, False, False),
        ("Good girl, Maisey", False, False, False),
        ({"name": "Maisey", "age": 7}, False, False, False),
        (1231, False, False, False),
        (11.21, False, False, False),
        ([], False, False, False),
        (True, False, False, False),
    ],
)
def test_object_described_by(
    test_obj,
    type_and_attrs_result,
    type_only_result,
    attrs_only_result,
    ObjectDescribedBy,  # noqa: N803
):
    assert (
        test_obj == ObjectDescribedBy(type=Dog, attrs={"name": "Maisey", "age": 7})
    ) is type_and_attrs_result

    assert (test_obj == ObjectDescribedBy(type=Dog)) is type_only_result

    assert (
        test_obj == ObjectDescribedBy(attrs={"name": "Maisey", "age": 7})
    ) is attrs_only_result
sentry-python-2.18.0/tests/test_crons.py
import uuid
from unittest import mock

import pytest

import sentry_sdk

from sentry_sdk.crons import capture_checkin


@sentry_sdk.monitor(monitor_slug="abc123")
def _hello_world(name):
    return "Hello, {}".format(name)


@sentry_sdk.monitor(monitor_slug="def456")
def _break_world(name):
    1 / 0
    return "Hello, {}".format(name)


def _hello_world_contextmanager(name):
    with sentry_sdk.monitor(monitor_slug="abc123"):
        return "Hello, {}".format(name)


def _break_world_contextmanager(name):
    with sentry_sdk.monitor(monitor_slug="def456"):
        1 / 0
        return "Hello, {}".format(name)


@sentry_sdk.monitor(monitor_slug="abc123")
async def _hello_world_async(name):
    return "Hello, {}".format(name)


@sentry_sdk.monitor(monitor_slug="def456")
async def _break_world_async(name):
    1 / 0
    return "Hello, {}".format(name)


async def my_coroutine():
    return


async def _hello_world_contextmanager_async(name):
    with sentry_sdk.monitor(monitor_slug="abc123"):
        await my_coroutine()
        return "Hello, {}".format(name)


async def _break_world_contextmanager_async(name):
    with sentry_sdk.monitor(monitor_slug="def456"):
        await my_coroutine()
        1 / 0
        return "Hello, {}".format(name)


@sentry_sdk.monitor(monitor_slug="ghi789", monitor_config=None)
def _no_monitor_config():
    return


@sentry_sdk.monitor(
    monitor_slug="ghi789",
    monitor_config={
        "schedule": {"type": "crontab", "value": "0 0 * * *"},
        "failure_issue_threshold": 5,
    },
)
def _with_monitor_config():
    return
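

# sentry_sdk.monitor works both as a decorator and as a context manager (see
# the helpers above). It emits an "in_progress" check-in on entry and an "ok"
# or "error" check-in on exit; the tests below assert exactly that through a
# mocked capture_checkin. Minimal sketch (hypothetical slug):
def _monitor_usage_sketch():
    with sentry_sdk.monitor(monitor_slug="nightly-job"):
        pass  # the monitored work would run here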


def test_decorator(sentry_init):
    sentry_init()

    with mock.patch(
        "sentry_sdk.crons.decorator.capture_checkin"
    ) as fake_capture_checkin:
        result = _hello_world("Grace")
        assert result == "Hello, Grace"

        # Check for initial checkin
        fake_capture_checkin.assert_has_calls(
            [
                mock.call(
                    monitor_slug="abc123", status="in_progress", monitor_config=None
                ),
            ]
        )

        # Check for final checkin
        assert fake_capture_checkin.call_args[1]["monitor_slug"] == "abc123"
        assert fake_capture_checkin.call_args[1]["status"] == "ok"
        assert fake_capture_checkin.call_args[1]["duration"]
        assert fake_capture_checkin.call_args[1]["check_in_id"]


def test_decorator_error(sentry_init):
    sentry_init()

    with mock.patch(
        "sentry_sdk.crons.decorator.capture_checkin"
    ) as fake_capture_checkin:
        with pytest.raises(ZeroDivisionError):
            result = _break_world("Grace")

        assert "result" not in locals()

        # Check for initial checkin
        fake_capture_checkin.assert_has_calls(
            [
                mock.call(
                    monitor_slug="def456", status="in_progress", monitor_config=None
                ),
            ]
        )

        # Check for final checkin
        assert fake_capture_checkin.call_args[1]["monitor_slug"] == "def456"
        assert fake_capture_checkin.call_args[1]["status"] == "error"
        assert fake_capture_checkin.call_args[1]["duration"]
        assert fake_capture_checkin.call_args[1]["check_in_id"]


def test_contextmanager(sentry_init):
    sentry_init()

    with mock.patch(
        "sentry_sdk.crons.decorator.capture_checkin"
    ) as fake_capture_checkin:
        result = _hello_world_contextmanager("Grace")
        assert result == "Hello, Grace"

        # Check for initial checkin
        fake_capture_checkin.assert_has_calls(
            [
                mock.call(
                    monitor_slug="abc123", status="in_progress", monitor_config=None
                ),
            ]
        )

        # Check for final checkin
        assert fake_capture_checkin.call_args[1]["monitor_slug"] == "abc123"
        assert fake_capture_checkin.call_args[1]["status"] == "ok"
        assert fake_capture_checkin.call_args[1]["duration"]
        assert fake_capture_checkin.call_args[1]["check_in_id"]


def test_contextmanager_error(sentry_init):
    sentry_init()

    with mock.patch(
        "sentry_sdk.crons.decorator.capture_checkin"
    ) as fake_capture_checkin:
        with pytest.raises(ZeroDivisionError):
            result = _break_world_contextmanager("Grace")

        assert "result" not in locals()

        # Check for initial checkin
        fake_capture_checkin.assert_has_calls(
            [
                mock.call(
                    monitor_slug="def456", status="in_progress", monitor_config=None
                ),
            ]
        )

        # Check for final checkin
        assert fake_capture_checkin.call_args[1]["monitor_slug"] == "def456"
        assert fake_capture_checkin.call_args[1]["status"] == "error"
        assert fake_capture_checkin.call_args[1]["duration"]
        assert fake_capture_checkin.call_args[1]["check_in_id"]


def test_capture_checkin_simple(sentry_init):
    sentry_init()

    check_in_id = capture_checkin(
        monitor_slug="abc123",
        check_in_id="112233",
        status=None,
        duration=None,
    )
    assert check_in_id == "112233"


def test_sample_rate_doesnt_affect_crons(sentry_init, capture_envelopes):
    sentry_init(sample_rate=0)
    envelopes = capture_envelopes()

    capture_checkin(check_in_id="112233")

    assert len(envelopes) == 1

    check_in = envelopes[0].items[0].payload.json
    assert check_in["check_in_id"] == "112233"


def test_capture_checkin_new_id(sentry_init):
    sentry_init()

    with mock.patch("uuid.uuid4") as mock_uuid:
        mock_uuid.return_value = uuid.UUID("a8098c1a-f86e-11da-bd1a-00112444be1e")
        check_in_id = capture_checkin(
            monitor_slug="abc123",
            check_in_id=None,
            status=None,
            duration=None,
        )

        assert check_in_id == "a8098c1af86e11dabd1a00112444be1e"


def test_end_to_end(sentry_init, capture_envelopes):
    sentry_init()
    envelopes = capture_envelopes()

    capture_checkin(
        monitor_slug="abc123",
        check_in_id="112233",
        duration=123,
        status="ok",
    )

    check_in = envelopes[0].items[0].payload.json

    # Check for final checkin
    assert check_in["check_in_id"] == "112233"
    assert check_in["monitor_slug"] == "abc123"
    assert check_in["status"] == "ok"
    assert check_in["duration"] == 123


def test_monitor_config(sentry_init, capture_envelopes):
    sentry_init()
    envelopes = capture_envelopes()

    monitor_config = {
        "schedule": {"type": "crontab", "value": "0 0 * * *"},
        "failure_issue_threshold": 5,
        "recovery_threshold": 5,
    }

    capture_checkin(monitor_slug="abc123", monitor_config=monitor_config)
    check_in = envelopes[0].items[0].payload.json

    # Check for final checkin
    assert check_in["monitor_slug"] == "abc123"
    assert check_in["monitor_config"] == monitor_config

    # Without passing a monitor_config the field is not in the checkin
    capture_checkin(monitor_slug="abc123")
    check_in = envelopes[1].items[0].payload.json

    assert check_in["monitor_slug"] == "abc123"
    assert "monitor_config" not in check_in


def test_decorator_monitor_config(sentry_init, capture_envelopes):
    sentry_init()
    envelopes = capture_envelopes()

    _with_monitor_config()

    assert len(envelopes) == 2

    for check_in_envelope in envelopes:
        assert len(check_in_envelope.items) == 1
        check_in = check_in_envelope.items[0].payload.json

        assert check_in["monitor_slug"] == "ghi789"
        assert check_in["monitor_config"] == {
            "schedule": {"type": "crontab", "value": "0 0 * * *"},
            "failure_issue_threshold": 5,
        }


def test_decorator_no_monitor_config(sentry_init, capture_envelopes):
    sentry_init()
    envelopes = capture_envelopes()

    _no_monitor_config()

    assert len(envelopes) == 2

    for check_in_envelope in envelopes:
        assert len(check_in_envelope.items) == 1
        check_in = check_in_envelope.items[0].payload.json

        assert check_in["monitor_slug"] == "ghi789"
        assert "monitor_config" not in check_in


def test_capture_checkin_sdk_not_initialized():
    # Tests that capture_checkin does not raise an error when the Sentry SDK
    # is not initialized; sentry_init() is intentionally omitted.
    check_in_id = capture_checkin(
        monitor_slug="abc123",
        check_in_id="112233",
        status=None,
        duration=None,
    )
    assert check_in_id == "112233"


def test_scope_data_in_checkin(sentry_init, capture_envelopes):
    sentry_init()
    envelopes = capture_envelopes()

    valid_keys = [
        # Mandatory event keys
        "type",
        "event_id",
        "timestamp",
        "platform",
        # Optional event keys
        "release",
        "environment",
        "server_name",
        "sdk",
        # Mandatory check-in specific keys
        "check_in_id",
        "monitor_slug",
        "status",
        # Optional check-in specific keys
        "duration",
        "monitor_config",
        "contexts",  # an event processor adds this
    ]

    # Add some data to the scope
    sentry_sdk.add_breadcrumb(message="test breadcrumb")
    sentry_sdk.set_context("test_context", {"test_key": "test_value"})
    sentry_sdk.set_extra("test_extra", "test_value")
    sentry_sdk.set_level("warning")
    sentry_sdk.set_tag("test_tag", "test_value")

    capture_checkin(
        monitor_slug="abc123",
        check_in_id="112233",
        status="ok",
        duration=123,
    )

    (envelope,) = envelopes
    check_in_event = envelope.items[0].payload.json

    invalid_keys = []
    for key in check_in_event.keys():
        if key not in valid_keys:
            invalid_keys.append(key)

    assert len(invalid_keys) == 0, "Unexpected keys found in checkin: {}".format(
        invalid_keys
    )


@pytest.mark.asyncio
async def test_decorator_async(sentry_init):
    sentry_init()

    with mock.patch(
        "sentry_sdk.crons.decorator.capture_checkin"
    ) as fake_capture_checkin:
        result = await _hello_world_async("Grace")
        assert result == "Hello, Grace"

        # Check for initial checkin
        fake_capture_checkin.assert_has_calls(
            [
                mock.call(
                    monitor_slug="abc123", status="in_progress", monitor_config=None
                ),
            ]
        )

        # Check for final checkin
        assert fake_capture_checkin.call_args[1]["monitor_slug"] == "abc123"
        assert fake_capture_checkin.call_args[1]["status"] == "ok"
        assert fake_capture_checkin.call_args[1]["duration"]
        assert fake_capture_checkin.call_args[1]["check_in_id"]


@pytest.mark.asyncio
async def test_decorator_error_async(sentry_init):
    sentry_init()

    with mock.patch(
        "sentry_sdk.crons.decorator.capture_checkin"
    ) as fake_capture_checkin:
        with pytest.raises(ZeroDivisionError):
            result = await _break_world_async("Grace")

        assert "result" not in locals()

        # Check for initial checkin
        fake_capture_checkin.assert_has_calls(
            [
                mock.call(
                    monitor_slug="def456", status="in_progress", monitor_config=None
                ),
            ]
        )

        # Check for final checkin
        assert fake_capture_checkin.call_args[1]["monitor_slug"] == "def456"
        assert fake_capture_checkin.call_args[1]["status"] == "error"
        assert fake_capture_checkin.call_args[1]["duration"]
        assert fake_capture_checkin.call_args[1]["check_in_id"]


@pytest.mark.asyncio
async def test_contextmanager_async(sentry_init):
    sentry_init()

    with mock.patch(
        "sentry_sdk.crons.decorator.capture_checkin"
    ) as fake_capture_checkin:
        result = await _hello_world_contextmanager_async("Grace")
        assert result == "Hello, Grace"

        # Check for initial checkin
        fake_capture_checkin.assert_has_calls(
            [
                mock.call(
                    monitor_slug="abc123", status="in_progress", monitor_config=None
                ),
            ]
        )

        # Check for final checkin
        assert fake_capture_checkin.call_args[1]["monitor_slug"] == "abc123"
        assert fake_capture_checkin.call_args[1]["status"] == "ok"
        assert fake_capture_checkin.call_args[1]["duration"]
        assert fake_capture_checkin.call_args[1]["check_in_id"]


@pytest.mark.asyncio
async def test_contextmanager_error_async(sentry_init):
    sentry_init()

    with mock.patch(
        "sentry_sdk.crons.decorator.capture_checkin"
    ) as fake_capture_checkin:
        with pytest.raises(ZeroDivisionError):
            result = await _break_world_contextmanager_async("Grace")

        assert "result" not in locals()

        # Check for initial checkin
        fake_capture_checkin.assert_has_calls(
            [
                mock.call(
                    monitor_slug="def456", status="in_progress", monitor_config=None
                ),
            ]
        )

        # Check for final checkin
        assert fake_capture_checkin.call_args[1]["monitor_slug"] == "def456"
        assert fake_capture_checkin.call_args[1]["status"] == "error"
        assert fake_capture_checkin.call_args[1]["duration"]
        assert fake_capture_checkin.call_args[1]["check_in_id"]
sentry-python-2.18.0/tests/test_dsc.py000066400000000000000000000266131471214654000200050ustar00rootroot00000000000000"""
These tests check the correctness of the dynamic sampling context (DSC) in the trace header of envelopes.

The DSC is defined here:
https://develop.sentry.dev/sdk/telemetry/traces/dynamic-sampling-context/#dsc-specification

The DSC is propagated between services using a header called "baggage".
That propagation itself is not tested in this file.
"""

import pytest

import sentry_sdk
import sentry_sdk.client
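

# Illustrative sketch only (our own helper, not used by the tests below): how
# the "sentry-"-prefixed entries of a baggage header map onto DSC keys. The
# SDK's real parser lives in sentry_sdk.tracing_utils.Baggage.
def _sketch_dsc_from_baggage(baggage_header):
    dsc = {}
    for entry in baggage_header.split(","):
        key, _, value = entry.strip().partition("=")
        if key.startswith("sentry-"):
            dsc[key[len("sentry-") :]] = value
    return dsc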


def test_dsc_head_of_trace(sentry_init, capture_envelopes):
    """
    Our service is the head of the trace (it starts a new trace)
    and sends a transaction event to Sentry.
    """
    sentry_init(
        dsn="https://mysecret@bla.ingest.sentry.io/12312012",
        release="myapp@0.0.1",
        environment="canary",
        traces_sample_rate=1.0,
    )
    envelopes = capture_envelopes()

    # We start a new transaction
    with sentry_sdk.start_transaction(name="foo"):
        pass

    assert len(envelopes) == 1

    transaction_envelope = envelopes[0]
    envelope_trace_header = transaction_envelope.headers["trace"]

    assert "trace_id" in envelope_trace_header
    assert type(envelope_trace_header["trace_id"]) == str

    assert "public_key" in envelope_trace_header
    assert type(envelope_trace_header["public_key"]) == str
    assert envelope_trace_header["public_key"] == "mysecret"

    assert "sample_rate" in envelope_trace_header
    assert type(envelope_trace_header["sample_rate"]) == str
    assert envelope_trace_header["sample_rate"] == "1.0"

    assert "sampled" in envelope_trace_header
    assert type(envelope_trace_header["sampled"]) == str
    assert envelope_trace_header["sampled"] == "true"

    assert "release" in envelope_trace_header
    assert type(envelope_trace_header["release"]) == str
    assert envelope_trace_header["release"] == "myapp@0.0.1"

    assert "environment" in envelope_trace_header
    assert type(envelope_trace_header["environment"]) == str
    assert envelope_trace_header["environment"] == "canary"

    assert "transaction" in envelope_trace_header
    assert type(envelope_trace_header["transaction"]) == str
    assert envelope_trace_header["transaction"] == "foo"


def test_dsc_continuation_of_trace(sentry_init, capture_envelopes):
    """
    Another service calls our service and passes tracing information to us.
    Our service continues the trace and sends a transaction event to Sentry.
    """
    sentry_init(
        dsn="https://mysecret@bla.ingest.sentry.io/12312012",
        release="myapp@0.0.1",
        environment="canary",
        traces_sample_rate=1.0,
    )
    envelopes = capture_envelopes()

    # This is what the upstream service sends us
    sentry_trace = "771a43a4192642f0b136d5159a501700-1234567890abcdef-1"
    baggage = (
        "other-vendor-value-1=foo;bar;baz, "
        "sentry-trace_id=771a43a4192642f0b136d5159a501700, "
        "sentry-public_key=frontendpublickey, "
        "sentry-sample_rate=0.01337, "
        "sentry-sampled=true, "
        "sentry-release=myfrontend@1.2.3, "
        "sentry-environment=bird, "
        "sentry-transaction=bar, "
        "other-vendor-value-2=foo;bar;"
    )
    incoming_http_headers = {
        "HTTP_SENTRY_TRACE": sentry_trace,
        "HTTP_BAGGAGE": baggage,
    }

    # We continue the incoming trace and start a new transaction
    transaction = sentry_sdk.continue_trace(incoming_http_headers)
    with sentry_sdk.start_transaction(transaction, name="foo"):
        pass

    assert len(envelopes) == 1

    transaction_envelope = envelopes[0]
    envelope_trace_header = transaction_envelope.headers["trace"]

    assert "trace_id" in envelope_trace_header
    assert type(envelope_trace_header["trace_id"]) == str
    assert envelope_trace_header["trace_id"] == "771a43a4192642f0b136d5159a501700"

    assert "public_key" in envelope_trace_header
    assert type(envelope_trace_header["public_key"]) == str
    assert envelope_trace_header["public_key"] == "frontendpublickey"

    assert "sample_rate" in envelope_trace_header
    assert type(envelope_trace_header["sample_rate"]) == str
    assert envelope_trace_header["sample_rate"] == "0.01337"

    assert "sampled" in envelope_trace_header
    assert type(envelope_trace_header["sampled"]) == str
    assert envelope_trace_header["sampled"] == "true"

    assert "release" in envelope_trace_header
    assert type(envelope_trace_header["release"]) == str
    assert envelope_trace_header["release"] == "myfrontend@1.2.3"

    assert "environment" in envelope_trace_header
    assert type(envelope_trace_header["environment"]) == str
    assert envelope_trace_header["environment"] == "bird"

    assert "transaction" in envelope_trace_header
    assert type(envelope_trace_header["transaction"]) == str
    assert envelope_trace_header["transaction"] == "bar"


def test_dsc_issue(sentry_init, capture_envelopes):
    """
    Our service is a standalone service that does not have tracing enabled. It just uses Sentry for error reporting.
    """
    sentry_init(
        dsn="https://mysecret@bla.ingest.sentry.io/12312012",
        release="myapp@0.0.1",
        environment="canary",
    )
    envelopes = capture_envelopes()

    # No transaction is started, just an error is captured
    try:
        1 / 0
    except ZeroDivisionError as exp:
        sentry_sdk.capture_exception(exp)

    assert len(envelopes) == 1

    error_envelope = envelopes[0]

    envelope_trace_header = error_envelope.headers["trace"]

    assert "trace_id" in envelope_trace_header
    assert type(envelope_trace_header["trace_id"]) == str

    assert "public_key" in envelope_trace_header
    assert type(envelope_trace_header["public_key"]) == str
    assert envelope_trace_header["public_key"] == "mysecret"

    assert "sample_rate" not in envelope_trace_header

    assert "sampled" not in envelope_trace_header

    assert "release" in envelope_trace_header
    assert type(envelope_trace_header["release"]) == str
    assert envelope_trace_header["release"] == "myapp@0.0.1"

    assert "environment" in envelope_trace_header
    assert type(envelope_trace_header["environment"]) == str
    assert envelope_trace_header["environment"] == "canary"

    assert "transaction" not in envelope_trace_header


def test_dsc_issue_with_tracing(sentry_init, capture_envelopes):
    """
    Our service has tracing enabled and an error occurs in a transaction.
    Envelopes containing errors carry the same DSC as the transaction envelopes.
    """
    sentry_init(
        dsn="https://mysecret@bla.ingest.sentry.io/12312012",
        release="myapp@0.0.1",
        environment="canary",
        traces_sample_rate=1.0,
    )
    envelopes = capture_envelopes()

    # We start a new transaction and an error occurs
    with sentry_sdk.start_transaction(name="foo"):
        try:
            1 / 0
        except ZeroDivisionError as exp:
            sentry_sdk.capture_exception(exp)

    assert len(envelopes) == 2

    error_envelope, transaction_envelope = envelopes

    assert error_envelope.headers["trace"] == transaction_envelope.headers["trace"]

    envelope_trace_header = error_envelope.headers["trace"]

    assert "trace_id" in envelope_trace_header
    assert type(envelope_trace_header["trace_id"]) == str

    assert "public_key" in envelope_trace_header
    assert type(envelope_trace_header["public_key"]) == str
    assert envelope_trace_header["public_key"] == "mysecret"

    assert "sample_rate" in envelope_trace_header
    assert envelope_trace_header["sample_rate"] == "1.0"
    assert type(envelope_trace_header["sample_rate"]) == str

    assert "sampled" in envelope_trace_header
    assert type(envelope_trace_header["sampled"]) == str
    assert envelope_trace_header["sampled"] == "true"

    assert "release" in envelope_trace_header
    assert type(envelope_trace_header["release"]) == str
    assert envelope_trace_header["release"] == "myapp@0.0.1"

    assert "environment" in envelope_trace_header
    assert type(envelope_trace_header["environment"]) == str
    assert envelope_trace_header["environment"] == "canary"

    assert "transaction" in envelope_trace_header
    assert type(envelope_trace_header["transaction"]) == str
    assert envelope_trace_header["transaction"] == "foo"


@pytest.mark.parametrize(
    "traces_sample_rate",
    [
        0,  # no traces will be started, but incoming traces will be continued (by our instrumentations; not happening in this test)
        None,  # no tracing at all. This service will never create transactions.
    ],
)
def test_dsc_issue_twp(sentry_init, capture_envelopes, traces_sample_rate):
    """
    Our service does not have tracing enabled, but we receive tracing information from an upstream service.
    Error envelopes still contain a DSC. This is called "tracing without performance", or TWP for short.

    This way, if there are three services A, B, and C, where A and C have tracing enabled but B does not,
    we can still see the full trace in Sentry and associate the errors sent by service B with it.
    (This test plays the role of service B in this scenario.)
    """
    sentry_init(
        dsn="https://mysecret@bla.ingest.sentry.io/12312012",
        release="myapp@0.0.1",
        environment="canary",
        traces_sample_rate=traces_sample_rate,
    )
    envelopes = capture_envelopes()

    # This is what the upstream service sends us
    sentry_trace = "771a43a4192642f0b136d5159a501700-1234567890abcdef-1"
    baggage = (
        "other-vendor-value-1=foo;bar;baz, "
        "sentry-trace_id=771a43a4192642f0b136d5159a501700, "
        "sentry-public_key=frontendpublickey, "
        "sentry-sample_rate=0.01337, "
        "sentry-sampled=true, "
        "sentry-release=myfrontend@1.2.3, "
        "sentry-environment=bird, "
        "sentry-transaction=bar, "
        "other-vendor-value-2=foo;bar;"
    )
    incoming_http_headers = {
        "HTTP_SENTRY_TRACE": sentry_trace,
        "HTTP_BAGGAGE": baggage,
    }

    # We continue the trace (meaning: we save the incoming trace information on the scope)
    # but in this test, we do not start a transaction.
    sentry_sdk.continue_trace(incoming_http_headers)

    # No transaction is started, just an error is captured
    try:
        1 / 0
    except ZeroDivisionError as exp:
        sentry_sdk.capture_exception(exp)

    assert len(envelopes) == 1

    error_envelope = envelopes[0]

    envelope_trace_header = error_envelope.headers["trace"]

    assert "trace_id" in envelope_trace_header
    assert type(envelope_trace_header["trace_id"]) == str
    assert envelope_trace_header["trace_id"] == "771a43a4192642f0b136d5159a501700"

    assert "public_key" in envelope_trace_header
    assert type(envelope_trace_header["public_key"]) == str
    assert envelope_trace_header["public_key"] == "frontendpublickey"

    assert "sample_rate" in envelope_trace_header
    assert type(envelope_trace_header["sample_rate"]) == str
    assert envelope_trace_header["sample_rate"] == "0.01337"

    assert "sampled" in envelope_trace_header
    assert type(envelope_trace_header["sampled"]) == str
    assert envelope_trace_header["sampled"] == "true"

    assert "release" in envelope_trace_header
    assert type(envelope_trace_header["release"]) == str
    assert envelope_trace_header["release"] == "myfrontend@1.2.3"

    assert "environment" in envelope_trace_header
    assert type(envelope_trace_header["environment"]) == str
    assert envelope_trace_header["environment"] == "bird"

    assert "transaction" in envelope_trace_header
    assert type(envelope_trace_header["transaction"]) == str
    assert envelope_trace_header["transaction"] == "bar"
sentry-python-2.18.0/tests/test_envelope.py000066400000000000000000000175711471214654000210540ustar00rootroot00000000000000from sentry_sdk.envelope import Envelope
from sentry_sdk.session import Session
from sentry_sdk import capture_event
import sentry_sdk.client


def generate_transaction_item():
    return {
        "event_id": "15210411201320122115110420122013",
        "type": "transaction",
        "transaction": "/interactions/other-dogs/new-dog",
        "start_timestamp": 1353568872.11122131,
        "timestamp": 1356942672.09040815,
        "contexts": {
            "trace": {
                "trace_id": "12312012123120121231201212312012",
                "span_id": "0415201309082013",
                "parent_span_id": None,
                "description": "",
                "op": "greeting.sniff",
                "dynamic_sampling_context": {
                    "trace_id": "12312012123120121231201212312012",
                    "sample_rate": "1.0",
                    "environment": "dogpark",
                    "release": "off.leash.park",
                    "public_key": "dogsarebadatkeepingsecrets",
                    "transaction": "/interactions/other-dogs/new-dog",
                },
            }
        },
        "spans": [
            {
                "description": "",
                "op": "greeting.sniff",
                "parent_span_id": None,
                "span_id": "0415201309082013",
                "start_timestamp": 1353568872.11122131,
                "timestamp": 1356942672.09040815,
                "trace_id": "12312012123120121231201212312012",
            }
        ],
    }


def test_add_and_get_basic_event():
    envelope = Envelope()

    expected = {"message": "Hello, World!"}
    envelope.add_event(expected)

    assert envelope.get_event() == {"message": "Hello, World!"}


def test_add_and_get_transaction_event():
    envelope = Envelope()

    transaction_item = generate_transaction_item()
    transaction_item.update({"event_id": "a" * 32})
    envelope.add_transaction(transaction_item)

    # typically it should not be possible to add a second transaction,
    # but we do it anyway
    another_transaction_item = generate_transaction_item()
    envelope.add_transaction(another_transaction_item)

    # should only fetch the first inserted transaction event
    assert envelope.get_transaction_event() == transaction_item


def test_add_and_get_session():
    envelope = Envelope()

    expected = Session()
    envelope.add_session(expected)

    for item in envelope:
        if item.type == "session":
            assert item.payload.json == expected.to_json()


def test_envelope_headers(sentry_init, capture_envelopes, monkeypatch):
    monkeypatch.setattr(
        sentry_sdk.client,
        "format_timestamp",
        lambda x: "2012-11-21T12:31:12.415908Z",
    )

    sentry_init(
        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
        traces_sample_rate=1.0,
    )
    envelopes = capture_envelopes()

    capture_event(generate_transaction_item())

    assert len(envelopes) == 1

    assert envelopes[0].headers == {
        "event_id": "15210411201320122115110420122013",
        "sent_at": "2012-11-21T12:31:12.415908Z",
        "trace": {
            "trace_id": "12312012123120121231201212312012",
            "sample_rate": "1.0",
            "environment": "dogpark",
            "release": "off.leash.park",
            "public_key": "dogsarebadatkeepingsecrets",
            "transaction": "/interactions/other-dogs/new-dog",
        },
    }


def test_envelope_with_sized_items():
    """
    Tests that envelopes whose item sizes are specified
    in the item headers are parsed successfully.
    """
    envelope_raw = (
        b'{"event_id":"9ec79c33ec9942ab8353589fcb2e04dc"}\n'
        b'{"type":"type1","length":4 }\n1234\n'
        b'{"type":"type2","length":4 }\nabcd\n'
        b'{"type":"type3","length":0}\n\n'
        b'{"type":"type4","length":4 }\nab12\n'
    )
    envelope_raw_eof_terminated = envelope_raw[:-1]

    for envelope in (envelope_raw, envelope_raw_eof_terminated):
        actual = Envelope.deserialize(envelope)

        items = [item for item in actual]

        assert len(items) == 4

        assert items[0].type == "type1"
        assert items[0].get_bytes() == b"1234"

        assert items[1].type == "type2"
        assert items[1].get_bytes() == b"abcd"

        assert items[2].type == "type3"
        assert items[2].get_bytes() == b""

        assert items[3].type == "type4"
        assert items[3].get_bytes() == b"ab12"

        assert actual.headers["event_id"] == "9ec79c33ec9942ab8353589fcb2e04dc"


def test_envelope_with_implicitly_sized_items():
    """
    Tests that envelopes whose item sizes are not specified
    in the item headers are parsed successfully.
    """
    envelope_raw = (
        b'{"event_id":"9ec79c33ec9942ab8353589fcb2e04dc"}\n'
        b'{"type":"type1"}\n1234\n'
        b'{"type":"type2"}\nabcd\n'
        b'{"type":"type3"}\n\n'
        b'{"type":"type4"}\nab12\n'
    )
    envelope_raw_eof_terminated = envelope_raw[:-1]

    for envelope in (envelope_raw, envelope_raw_eof_terminated):
        actual = Envelope.deserialize(envelope)
        assert actual.headers["event_id"] == "9ec79c33ec9942ab8353589fcb2e04dc"

        items = [item for item in actual]

        assert len(items) == 4

        assert items[0].type == "type1"
        assert items[0].get_bytes() == b"1234"

        assert items[1].type == "type2"
        assert items[1].get_bytes() == b"abcd"

        assert items[2].type == "type3"
        assert items[2].get_bytes() == b""

        assert items[3].type == "type4"
        assert items[3].get_bytes() == b"ab12"


def test_envelope_with_two_attachments():
    """
    Test that items are correctly parsed in an envelope with two size-specified items.
    """
    two_attachments = (
        b'{"event_id":"9ec79c33ec9942ab8353589fcb2e04dc","dsn":"https://e12d836b15bb49d7bbf99e64295d995b:@sentry.io/42"}\n'
        + b'{"type":"attachment","length":10,"content_type":"text/plain","filename":"hello.txt"}\n'
        + b"\xef\xbb\xbfHello\r\n\n"
        + b'{"type":"event","length":41,"content_type":"application/json","filename":"application.log"}\n'
        + b'{"message":"hello world","level":"error"}\n'
    )
    two_attachments_eof_terminated = two_attachments[
        :-1
    ]  # the last \n is optional; without it the envelope should still be valid

    for envelope_raw in (two_attachments, two_attachments_eof_terminated):
        actual = Envelope.deserialize(envelope_raw)
        items = [item for item in actual]

        assert len(items) == 2
        assert items[0].get_bytes() == b"\xef\xbb\xbfHello\r\n"
        assert items[1].payload.json == {"message": "hello world", "level": "error"}


def test_envelope_with_empty_attachments():
    """
    Test that items are correctly parsed in an envelope with two zero-length items (with size specified in the header).
    """
    two_empty_attachments = (
        b'{"event_id":"9ec79c33ec9942ab8353589fcb2e04dc"}\n'
        + b'{"type":"attachment","length":0}\n\n'
        + b'{"type":"attachment","length":0}\n\n'
    )

    two_empty_attachments_eof_terminated = two_empty_attachments[
        :-1
    ]  # the last \n is optional; without it the envelope should still be valid

    for envelope_raw in (two_empty_attachments, two_empty_attachments_eof_terminated):
        actual = Envelope.deserialize(envelope_raw)
        items = [item for item in actual]

        assert len(items) == 2
        assert items[0].get_bytes() == b""
        assert items[1].get_bytes() == b""


def test_envelope_without_headers():
    """
    Test that an envelope without headers is parsed successfully
    """
    envelope_without_headers = (
        b"{}\n" + b'{"type":"session"}\n' + b'{"started": "2020-02-07T14:16:00Z"}'
    )
    actual = Envelope.deserialize(envelope_without_headers)
    items = [item for item in actual]

    assert len(items) == 1
    assert items[0].payload.get_bytes() == b'{"started": "2020-02-07T14:16:00Z"}'
sentry-python-2.18.0/tests/test_exceptiongroup.py000066400000000000000000000206021471214654000222770ustar00rootroot00000000000000import sys
import pytest

from sentry_sdk.utils import event_from_exception


try:
    # Python 3.11
    from builtins import ExceptionGroup  # type: ignore
except ImportError:
    # Python 3.10 and below
    ExceptionGroup = None


minimum_python_311 = pytest.mark.skipif(
    sys.version_info < (3, 11), reason="ExceptionGroup tests need Python >= 3.11"
)


@minimum_python_311
def test_exceptiongroup():
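    # For orientation: an ExceptionGroup exposes its children via the
    # .exceptions tuple, and the SDK walks that tree recursively to build the
    # flat "values" list with the exception_id/parent_id links asserted below.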
    exception_group = None

    try:
        try:
            raise RuntimeError("something")
        except RuntimeError:
            raise ExceptionGroup(
                "nested",
                [
                    ValueError(654),
                    ExceptionGroup(
                        "imports",
                        [
                            ImportError("no_such_module"),
                            ModuleNotFoundError("another_module"),
                        ],
                    ),
                    TypeError("int"),
                ],
            )
    except ExceptionGroup as e:
        exception_group = e

    (event, _) = event_from_exception(
        exception_group,
        client_options={
            "include_local_variables": True,
            "include_source_context": True,
            "max_value_length": 1024,
        },
        mechanism={"type": "test_suite", "handled": False},
    )

    values = event["exception"]["values"]

    # For this test the stacktrace and the module is not important
    for x in values:
        if "stacktrace" in x:
            del x["stacktrace"]
        if "module" in x:
            del x["module"]

    expected_values = [
        {
            "mechanism": {
                "exception_id": 6,
                "handled": False,
                "parent_id": 0,
                "source": "exceptions[2]",
                "type": "chained",
            },
            "type": "TypeError",
            "value": "int",
        },
        {
            "mechanism": {
                "exception_id": 5,
                "handled": False,
                "parent_id": 3,
                "source": "exceptions[1]",
                "type": "chained",
            },
            "type": "ModuleNotFoundError",
            "value": "another_module",
        },
        {
            "mechanism": {
                "exception_id": 4,
                "handled": False,
                "parent_id": 3,
                "source": "exceptions[0]",
                "type": "chained",
            },
            "type": "ImportError",
            "value": "no_such_module",
        },
        {
            "mechanism": {
                "exception_id": 3,
                "handled": False,
                "is_exception_group": True,
                "parent_id": 0,
                "source": "exceptions[1]",
                "type": "chained",
            },
            "type": "ExceptionGroup",
            "value": "imports",
        },
        {
            "mechanism": {
                "exception_id": 2,
                "handled": False,
                "parent_id": 0,
                "source": "exceptions[0]",
                "type": "chained",
            },
            "type": "ValueError",
            "value": "654",
        },
        {
            "mechanism": {
                "exception_id": 1,
                "handled": False,
                "parent_id": 0,
                "source": "__context__",
                "type": "chained",
            },
            "type": "RuntimeError",
            "value": "something",
        },
        {
            "mechanism": {
                "exception_id": 0,
                "handled": False,
                "is_exception_group": True,
                "type": "test_suite",
            },
            "type": "ExceptionGroup",
            "value": "nested",
        },
    ]

    assert values == expected_values


@minimum_python_311
def test_exceptiongroup_simple():
    exception_group = None

    try:
        raise ExceptionGroup(
            "simple",
            [
                RuntimeError("something strange's going on"),
            ],
        )
    except ExceptionGroup as e:
        exception_group = e

    (event, _) = event_from_exception(
        exception_group,
        client_options={
            "include_local_variables": True,
            "include_source_context": True,
            "max_value_length": 1024,
        },
        mechanism={"type": "test_suite", "handled": False},
    )

    exception_values = event["exception"]["values"]

    assert len(exception_values) == 2

    assert exception_values[0]["type"] == "RuntimeError"
    assert exception_values[0]["value"] == "something strange's going on"
    assert exception_values[0]["mechanism"] == {
        "type": "chained",
        "handled": False,
        "exception_id": 1,
        "source": "exceptions[0]",
        "parent_id": 0,
    }

    assert exception_values[1]["type"] == "ExceptionGroup"
    assert exception_values[1]["value"] == "simple"
    assert exception_values[1]["mechanism"] == {
        "type": "test_suite",
        "handled": False,
        "exception_id": 0,
        "is_exception_group": True,
    }
    frame = exception_values[1]["stacktrace"]["frames"][0]
    assert frame["module"] == "tests.test_exceptiongroup"
    assert frame["context_line"] == "        raise ExceptionGroup("


@minimum_python_311
def test_exception_chain_cause():
    exception_chain_cause = ValueError("Exception with cause")
    exception_chain_cause.__context__ = TypeError("Exception in __context__")
    exception_chain_cause.__cause__ = TypeError(
        "Exception in __cause__"
    )  # this implicitly sets exception_chain_cause.__suppress_context__=True

    (event, _) = event_from_exception(
        exception_chain_cause,
        client_options={
            "include_local_variables": True,
            "include_source_context": True,
            "max_value_length": 1024,
        },
        mechanism={"type": "test_suite", "handled": False},
    )

    expected_exception_values = [
        {
            "mechanism": {
                "handled": False,
                "type": "test_suite",
            },
            "module": None,
            "type": "TypeError",
            "value": "Exception in __cause__",
        },
        {
            "mechanism": {
                "handled": False,
                "type": "test_suite",
            },
            "module": None,
            "type": "ValueError",
            "value": "Exception with cause",
        },
    ]

    exception_values = event["exception"]["values"]
    assert exception_values == expected_exception_values


@minimum_python_311
def test_exception_chain_context():
    exception_chain_context = ValueError("Exception with context")
    exception_chain_context.__context__ = TypeError("Exception in __context__")

    (event, _) = event_from_exception(
        exception_chain_context,
        client_options={
            "include_local_variables": True,
            "include_source_context": True,
            "max_value_length": 1024,
        },
        mechanism={"type": "test_suite", "handled": False},
    )

    expected_exception_values = [
        {
            "mechanism": {
                "handled": False,
                "type": "test_suite",
            },
            "module": None,
            "type": "TypeError",
            "value": "Exception in __context__",
        },
        {
            "mechanism": {
                "handled": False,
                "type": "test_suite",
            },
            "module": None,
            "type": "ValueError",
            "value": "Exception with context",
        },
    ]

    exception_values = event["exception"]["values"]
    assert exception_values == expected_exception_values


@minimum_python_311
def test_simple_exception():
    simple_exception = ValueError("A simple exception")

    (event, _) = event_from_exception(
        simple_exception,
        client_options={
            "include_local_variables": True,
            "include_source_context": True,
            "max_value_length": 1024,
        },
        mechanism={"type": "test_suite", "handled": False},
    )

    expected_exception_values = [
        {
            "mechanism": {
                "handled": False,
                "type": "test_suite",
            },
            "module": None,
            "type": "ValueError",
            "value": "A simple exception",
        },
    ]

    exception_values = event["exception"]["values"]
    assert exception_values == expected_exception_values
sentry-python-2.18.0/tests/test_flag_utils.py000066400000000000000000000021721471214654000213570ustar00rootroot00000000000000from sentry_sdk.flag_utils import FlagBuffer
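

# Illustrative sketch only (our own helper, not the SDK implementation): the
# fixed-capacity, oldest-first eviction behavior asserted below, expressed
# with collections.deque for the distinct-key case exercised here.
from collections import deque


def _sketch_flag_buffer(capacity, pairs):
    buf = deque(maxlen=capacity)  # oldest entries fall off automatically
    for flag, result in pairs:
        buf.append({"flag": flag, "result": result})
    return list(buf)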


def test_flag_tracking():
    """Assert the ring buffer works."""
    buffer = FlagBuffer(capacity=3)
    buffer.set("a", True)
    flags = buffer.get()
    assert len(flags) == 1
    assert flags == [{"flag": "a", "result": True}]

    buffer.set("b", True)
    flags = buffer.get()
    assert len(flags) == 2
    assert flags == [{"flag": "a", "result": True}, {"flag": "b", "result": True}]

    buffer.set("c", True)
    flags = buffer.get()
    assert len(flags) == 3
    assert flags == [
        {"flag": "a", "result": True},
        {"flag": "b", "result": True},
        {"flag": "c", "result": True},
    ]

    buffer.set("d", False)
    flags = buffer.get()
    assert len(flags) == 3
    assert flags == [
        {"flag": "b", "result": True},
        {"flag": "c", "result": True},
        {"flag": "d", "result": False},
    ]

    buffer.set("e", False)
    buffer.set("f", False)
    flags = buffer.get()
    assert len(flags) == 3
    assert flags == [
        {"flag": "d", "result": False},
        {"flag": "e", "result": False},
        {"flag": "f", "result": False},
    ]
sentry-python-2.18.0/tests/test_lru_cache.py000066400000000000000000000023431471214654000211530ustar00rootroot00000000000000import pytest

from sentry_sdk._lru_cache import LRUCache


@pytest.mark.parametrize("max_size", [-10, -1, 0])
def test_illegal_size(max_size):
    with pytest.raises(AssertionError):
        LRUCache(max_size=max_size)


def test_simple_set_get():
    cache = LRUCache(1)
    assert cache.get(1) is None
    cache.set(1, 1)
    assert cache.get(1) == 1


def test_overwrite():
    cache = LRUCache(1)
    assert cache.get(1) is None
    cache.set(1, 1)
    assert cache.get(1) == 1
    cache.set(1, 2)
    assert cache.get(1) == 2


def test_cache_eviction():
    cache = LRUCache(3)
    cache.set(1, 1)
    cache.set(2, 2)
    cache.set(3, 3)
    assert cache.get(1) == 1
    assert cache.get(2) == 2
    cache.set(4, 4)
    assert cache.get(3) is None
    assert cache.get(4) == 4


def test_cache_miss():
    cache = LRUCache(1)
    assert cache.get(0) is None


def test_cache_set_overwrite():
    cache = LRUCache(3)
    cache.set(0, 0)
    cache.set(0, 1)
    assert cache.get(0) == 1


def test_cache_get_all():
    cache = LRUCache(3)
    cache.set(0, 0)
    cache.set(1, 1)
    cache.set(2, 2)
    cache.set(3, 3)
    assert cache.get_all() == [(1, 1), (2, 2), (3, 3)]
    cache.get(1)
    assert cache.get_all() == [(2, 2), (3, 3), (1, 1)]
sentry-python-2.18.0/tests/test_metrics.py000066400000000000000000000707161471214654000207050ustar00rootroot00000000000000import sys
import time
import linecache
from unittest import mock

import pytest

import sentry_sdk
from sentry_sdk import metrics
from sentry_sdk.tracing import TRANSACTION_SOURCE_ROUTE
from sentry_sdk.envelope import parse_json

try:
    import gevent
except ImportError:
    gevent = None


minimum_python_37_with_gevent = pytest.mark.skipif(
    gevent and sys.version_info < (3, 7),
    reason="Require Python 3.7 or higher with gevent",
)


def parse_metrics(bytes):
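    # Each line follows the statsd-like shape these tests emit (shape inferred
    # from the assertions below), e.g.:
    #   b"foobar@none:3.0|c|#foo:bar,blub:blah|T1701234567"
    #   <name@unit>:<value>[:<value>...]|<type>[|#<tag>:<val>,...][|T<timestamp>]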
    rv = []
    for line in bytes.splitlines():
        pieces = line.decode("utf-8").split("|")
        payload = pieces[0].split(":")
        name = payload[0]
        values = payload[1:]
        ty = pieces[1]
        ts = None
        tags = {}
        for piece in pieces[2:]:
            if piece[0] == "#":
                for pair in piece[1:].split(","):
                    k, v = pair.split(":", 1)
                    old = tags.get(k)
                    if old is not None:
                        if isinstance(old, list):
                            old.append(v)
                        else:
                            tags[k] = [old, v]
                    else:
                        tags[k] = v
            elif piece[0] == "T":
                ts = int(piece[1:])
            else:
                raise ValueError("unknown piece %r" % (piece,))
        rv.append((ts, name, ty, values, tags))
    rv.sort(key=lambda x: (x[0], x[1], tuple(sorted(x[4].items()))))  # tiebreak on each entry's own tags
    return rv


@minimum_python_37_with_gevent
@pytest.mark.forked
def test_increment(sentry_init, capture_envelopes, maybe_monkeypatched_threading):
    sentry_init(
        release="fun-release",
        environment="not-fun-env",
        _experiments={"enable_metrics": True, "metric_code_locations": True},
    )
    ts = time.time()
    envelopes = capture_envelopes()

    metrics.increment("foobar", 1.0, tags={"foo": "bar", "blub": "blah"}, timestamp=ts)
    # python specific alias
    metrics.incr("foobar", 2.0, tags={"foo": "bar", "blub": "blah"}, timestamp=ts)
    sentry_sdk.flush()

    (envelope,) = envelopes
    statsd_item, meta_item = envelope.items

    assert statsd_item.headers["type"] == "statsd"
    m = parse_metrics(statsd_item.payload.get_bytes())

    assert len(m) == 1
    assert m[0][1] == "foobar@none"
    assert m[0][2] == "c"
    assert m[0][3] == ["3.0"]
    assert m[0][4] == {
        "blub": "blah",
        "foo": "bar",
        "release": "fun-release",
        "environment": "not-fun-env",
    }

    assert meta_item.headers["type"] == "metric_meta"
    assert parse_json(meta_item.payload.get_bytes()) == {
        "timestamp": mock.ANY,
        "mapping": {
            "c:foobar@none": [
                {
                    "type": "location",
                    "filename": "tests/test_metrics.py",
                    "abs_path": __file__,
                    "function": sys._getframe().f_code.co_name,
                    "module": __name__,
                    "lineno": mock.ANY,
                    "pre_context": mock.ANY,
                    "context_line": mock.ANY,
                    "post_context": mock.ANY,
                }
            ]
        },
    }


@minimum_python_37_with_gevent
@pytest.mark.forked
def test_timing(sentry_init, capture_envelopes, maybe_monkeypatched_threading):
    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        _experiments={"enable_metrics": True, "metric_code_locations": True},
    )
    ts = time.time()
    envelopes = capture_envelopes()

    with metrics.timing("whatever", tags={"blub": "blah"}, timestamp=ts):
        time.sleep(0.1)
    sentry_sdk.flush()

    (envelope,) = envelopes
    statsd_item, meta_item = envelope.items

    assert statsd_item.headers["type"] == "statsd"
    m = parse_metrics(statsd_item.payload.get_bytes())

    assert len(m) == 1
    assert m[0][1] == "whatever@second"
    assert m[0][2] == "d"
    assert len(m[0][3]) == 1
    assert float(m[0][3][0]) >= 0.1
    assert m[0][4] == {
        "blub": "blah",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    assert meta_item.headers["type"] == "metric_meta"
    json = parse_json(meta_item.payload.get_bytes())
    assert json == {
        "timestamp": mock.ANY,
        "mapping": {
            "d:whatever@second": [
                {
                    "type": "location",
                    "filename": "tests/test_metrics.py",
                    "abs_path": __file__,
                    "function": sys._getframe().f_code.co_name,
                    "module": __name__,
                    "lineno": mock.ANY,
                    "pre_context": mock.ANY,
                    "context_line": mock.ANY,
                    "post_context": mock.ANY,
                }
            ]
        },
    }

    loc = json["mapping"]["d:whatever@second"][0]
    line = linecache.getline(loc["abs_path"], loc["lineno"])
    assert (
        line.strip()
        == 'with metrics.timing("whatever", tags={"blub": "blah"}, timestamp=ts):'
    )


@minimum_python_37_with_gevent
@pytest.mark.forked
def test_timing_decorator(
    sentry_init, capture_envelopes, maybe_monkeypatched_threading
):
    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        _experiments={"enable_metrics": True, "metric_code_locations": True},
    )
    envelopes = capture_envelopes()

    @metrics.timing("whatever-1", tags={"x": "y"})
    def amazing():
        time.sleep(0.1)
        return 42

    @metrics.timing("whatever-2", tags={"x": "y"}, unit="nanosecond")
    def amazing_nano():
        time.sleep(0.01)
        return 23

    assert amazing() == 42
    assert amazing_nano() == 23
    sentry_sdk.flush()

    (envelope,) = envelopes
    statsd_item, meta_item = envelope.items

    assert statsd_item.headers["type"] == "statsd"
    m = parse_metrics(statsd_item.payload.get_bytes())

    assert len(m) == 2
    assert m[0][1] == "whatever-1@second"
    assert m[0][2] == "d"
    assert len(m[0][3]) == 1
    assert float(m[0][3][0]) >= 0.1
    assert m[0][4] == {
        "x": "y",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    assert m[1][1] == "whatever-2@nanosecond"
    assert m[1][2] == "d"
    assert len(m[1][3]) == 1
    assert float(m[1][3][0]) >= 10000000.0
    assert m[1][4] == {
        "x": "y",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    assert meta_item.headers["type"] == "metric_meta"
    json = parse_json(meta_item.payload.get_bytes())
    assert json == {
        "timestamp": mock.ANY,
        "mapping": {
            "d:whatever-1@second": [
                {
                    "type": "location",
                    "filename": "tests/test_metrics.py",
                    "abs_path": __file__,
                    "function": sys._getframe().f_code.co_name,
                    "module": __name__,
                    "lineno": mock.ANY,
                    "pre_context": mock.ANY,
                    "context_line": mock.ANY,
                    "post_context": mock.ANY,
                }
            ],
            "d:whatever-2@nanosecond": [
                {
                    "type": "location",
                    "filename": "tests/test_metrics.py",
                    "abs_path": __file__,
                    "function": sys._getframe().f_code.co_name,
                    "module": __name__,
                    "lineno": mock.ANY,
                    "pre_context": mock.ANY,
                    "context_line": mock.ANY,
                    "post_context": mock.ANY,
                }
            ],
        },
    }

    # XXX: this is not the best location.  It would probably be better to
    # report the location in the function, however that is quite a bit
    # trickier to do since we report from outside the function, so we really
    # only see the call site.
    loc = json["mapping"]["d:whatever-1@second"][0]
    line = linecache.getline(loc["abs_path"], loc["lineno"])
    assert line.strip() == "assert amazing() == 42"


@minimum_python_37_with_gevent
@pytest.mark.forked
def test_timing_basic(sentry_init, capture_envelopes, maybe_monkeypatched_threading):
    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        _experiments={"enable_metrics": True, "metric_code_locations": True},
    )
    ts = time.time()
    envelopes = capture_envelopes()

    metrics.timing("timing", 1.0, tags={"a": "b"}, timestamp=ts)
    metrics.timing("timing", 2.0, tags={"a": "b"}, timestamp=ts)
    metrics.timing("timing", 2.0, tags={"a": "b"}, timestamp=ts)
    metrics.timing("timing", 3.0, tags={"a": "b"}, timestamp=ts)
    sentry_sdk.flush()

    (envelope,) = envelopes
    statsd_item, meta_item = envelope.items

    assert statsd_item.headers["type"] == "statsd"
    m = parse_metrics(statsd_item.payload.get_bytes())

    assert len(m) == 1
    assert m[0][1] == "timing@second"
    assert m[0][2] == "d"
    assert len(m[0][3]) == 4
    assert sorted(map(float, m[0][3])) == [1.0, 2.0, 2.0, 3.0]
    assert m[0][4] == {
        "a": "b",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    assert meta_item.headers["type"] == "metric_meta"
    assert parse_json(meta_item.payload.get_bytes()) == {
        "timestamp": mock.ANY,
        "mapping": {
            "d:timing@second": [
                {
                    "type": "location",
                    "filename": "tests/test_metrics.py",
                    "abs_path": __file__,
                    "function": sys._getframe().f_code.co_name,
                    "module": __name__,
                    "lineno": mock.ANY,
                    "pre_context": mock.ANY,
                    "context_line": mock.ANY,
                    "post_context": mock.ANY,
                }
            ]
        },
    }


@minimum_python_37_with_gevent
@pytest.mark.forked
def test_distribution(sentry_init, capture_envelopes, maybe_monkeypatched_threading):
    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        _experiments={"enable_metrics": True, "metric_code_locations": True},
    )
    ts = time.time()
    envelopes = capture_envelopes()

    metrics.distribution("dist", 1.0, tags={"a": "b"}, timestamp=ts)
    metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts)
    metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts)
    metrics.distribution("dist", 3.0, tags={"a": "b"}, timestamp=ts)
    sentry_sdk.flush()

    (envelope,) = envelopes
    statsd_item, meta_item = envelope.items

    assert statsd_item.headers["type"] == "statsd"
    m = parse_metrics(statsd_item.payload.get_bytes())

    assert len(m) == 1
    assert m[0][1] == "dist@none"
    assert m[0][2] == "d"
    assert len(m[0][3]) == 4
    assert sorted(map(float, m[0][3])) == [1.0, 2.0, 2.0, 3.0]
    assert m[0][4] == {
        "a": "b",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    assert meta_item.headers["type"] == "metric_meta"
    json = parse_json(meta_item.payload.get_bytes())
    assert json == {
        "timestamp": mock.ANY,
        "mapping": {
            "d:dist@none": [
                {
                    "type": "location",
                    "filename": "tests/test_metrics.py",
                    "abs_path": __file__,
                    "function": sys._getframe().f_code.co_name,
                    "module": __name__,
                    "lineno": mock.ANY,
                    "pre_context": mock.ANY,
                    "context_line": mock.ANY,
                    "post_context": mock.ANY,
                }
            ]
        },
    }

    loc = json["mapping"]["d:dist@none"][0]
    line = linecache.getline(loc["abs_path"], loc["lineno"])
    assert (
        line.strip()
        == 'metrics.distribution("dist", 1.0, tags={"a": "b"}, timestamp=ts)'
    )


@minimum_python_37_with_gevent
@pytest.mark.forked
def test_set(sentry_init, capture_envelopes, maybe_monkeypatched_threading):
    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        _experiments={"enable_metrics": True, "metric_code_locations": True},
    )
    ts = time.time()
    envelopes = capture_envelopes()

    metrics.set("my-set", "peter", tags={"magic": "puff"}, timestamp=ts)
    metrics.set("my-set", "paul", tags={"magic": "puff"}, timestamp=ts)
    metrics.set("my-set", "mary", tags={"magic": "puff"}, timestamp=ts)
    sentry_sdk.flush()

    (envelope,) = envelopes
    statsd_item, meta_item = envelope.items

    assert statsd_item.headers["type"] == "statsd"
    m = parse_metrics(statsd_item.payload.get_bytes())

    assert len(m) == 1
    assert m[0][1] == "my-set@none"
    assert m[0][2] == "s"
    assert len(m[0][3]) == 3
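    # string set members are hashed before emission (zlib.crc32 in the SDK),
    # which is why integer values are asserted here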
    assert sorted(map(int, m[0][3])) == [354582103, 2513273657, 3329318813]
    assert m[0][4] == {
        "magic": "puff",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    assert meta_item.headers["type"] == "metric_meta"
    assert parse_json(meta_item.payload.get_bytes()) == {
        "timestamp": mock.ANY,
        "mapping": {
            "s:my-set@none": [
                {
                    "type": "location",
                    "filename": "tests/test_metrics.py",
                    "abs_path": __file__,
                    "function": sys._getframe().f_code.co_name,
                    "module": __name__,
                    "lineno": mock.ANY,
                    "pre_context": mock.ANY,
                    "context_line": mock.ANY,
                    "post_context": mock.ANY,
                }
            ]
        },
    }


@minimum_python_37_with_gevent
@pytest.mark.forked
def test_gauge(sentry_init, capture_envelopes, maybe_monkeypatched_threading):
    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        _experiments={"enable_metrics": True, "metric_code_locations": False},
    )
    ts = time.time()
    envelopes = capture_envelopes()

    metrics.gauge("my-gauge", 10.0, tags={"x": "y"}, timestamp=ts)
    metrics.gauge("my-gauge", 20.0, tags={"x": "y"}, timestamp=ts)
    metrics.gauge("my-gauge", 30.0, tags={"x": "y"}, timestamp=ts)
    sentry_sdk.flush()

    (envelope,) = envelopes

    assert len(envelope.items) == 1
    assert envelope.items[0].headers["type"] == "statsd"
    m = parse_metrics(envelope.items[0].payload.get_bytes())

    assert len(m) == 1
    assert m[0][1] == "my-gauge@none"
    assert m[0][2] == "g"
    assert len(m[0][3]) == 5
    assert list(map(float, m[0][3])) == [30.0, 10.0, 30.0, 60.0, 3.0]
    assert m[0][4] == {
        "x": "y",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }


@minimum_python_37_with_gevent
@pytest.mark.forked
def test_multiple(sentry_init, capture_envelopes):
    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        _experiments={"enable_metrics": True, "metric_code_locations": False},
    )
    ts = time.time()
    envelopes = capture_envelopes()

    metrics.gauge("my-gauge", 10.0, tags={"x": "y"}, timestamp=ts)
    metrics.gauge("my-gauge", 20.0, tags={"x": "y"}, timestamp=ts)
    metrics.gauge("my-gauge", 30.0, tags={"x": "y"}, timestamp=ts)
    for _ in range(10):
        metrics.increment("counter-1", 1.0, timestamp=ts)
    metrics.increment("counter-2", 1.0, timestamp=ts)

    sentry_sdk.flush()

    (envelope,) = envelopes

    assert len(envelope.items) == 1
    assert envelope.items[0].headers["type"] == "statsd"
    m = parse_metrics(envelope.items[0].payload.get_bytes())

    assert len(m) == 3

    assert m[0][1] == "counter-1@none"
    assert m[0][2] == "c"
    assert list(map(float, m[0][3])) == [10.0]
    assert m[0][4] == {
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    assert m[1][1] == "counter-2@none"
    assert m[1][2] == "c"
    assert list(map(float, m[1][3])) == [1.0]
    assert m[1][4] == {
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    assert m[2][1] == "my-gauge@none"
    assert m[2][2] == "g"
    assert len(m[2][3]) == 5
    assert list(map(float, m[2][3])) == [30.0, 10.0, 30.0, 60.0, 3.0]
    assert m[2][4] == {
        "x": "y",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }


@minimum_python_37_with_gevent
@pytest.mark.forked
def test_transaction_name(
    sentry_init, capture_envelopes, maybe_monkeypatched_threading
):
    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        _experiments={"enable_metrics": True, "metric_code_locations": False},
    )
    ts = time.time()
    envelopes = capture_envelopes()

    sentry_sdk.get_current_scope().set_transaction_name(
        "/user/{user_id}", source="route"
    )
    metrics.distribution("dist", 1.0, tags={"a": "b"}, timestamp=ts)
    metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts)
    metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts)
    metrics.distribution("dist", 3.0, tags={"a": "b"}, timestamp=ts)

    sentry_sdk.flush()

    (envelope,) = envelopes

    assert len(envelope.items) == 1
    assert envelope.items[0].headers["type"] == "statsd"
    m = parse_metrics(envelope.items[0].payload.get_bytes())

    assert len(m) == 1
    assert m[0][1] == "dist@none"
    assert m[0][2] == "d"
    assert len(m[0][3]) == 4
    assert sorted(map(float, m[0][3])) == [1.0, 2.0, 2.0, 3.0]
    assert m[0][4] == {
        "a": "b",
        "transaction": "/user/{user_id}",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }


@minimum_python_37_with_gevent
@pytest.mark.forked
def test_metric_summaries(
    sentry_init, capture_envelopes, maybe_monkeypatched_threading
):
    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        enable_tracing=True,
    )
    ts = time.time()
    envelopes = capture_envelopes()

    with sentry_sdk.start_transaction(
        op="stuff", name="/foo", source=TRANSACTION_SOURCE_ROUTE
    ) as transaction:
        metrics.increment("root-counter", timestamp=ts)
        with metrics.timing("my-timer-metric", tags={"a": "b"}, timestamp=ts):
            for x in range(10):
                metrics.distribution("my-dist", float(x), timestamp=ts)

    sentry_sdk.flush()

    (transaction, envelope) = envelopes

    # Metrics Emission
    assert envelope.items[0].headers["type"] == "statsd"
    m = parse_metrics(envelope.items[0].payload.get_bytes())

    assert len(m) == 3

    assert m[0][1] == "my-dist@none"
    assert m[0][2] == "d"
    assert len(m[0][3]) == 10
    assert sorted(m[0][3]) == list(map(str, map(float, range(10))))
    assert m[0][4] == {
        "transaction": "/foo",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    assert m[1][1] == "my-timer-metric@second"
    assert m[1][2] == "d"
    assert len(m[1][3]) == 1
    assert m[1][4] == {
        "a": "b",
        "transaction": "/foo",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    assert m[2][1] == "root-counter@none"
    assert m[2][2] == "c"
    assert m[2][3] == ["1.0"]
    assert m[2][4] == {
        "transaction": "/foo",
        "release": "fun-release@1.0.0",
        "environment": "not-fun-env",
    }

    # Measurement Attachment
    t = transaction.items[0].get_transaction_event()

    assert t["_metrics_summary"] == {
        "c:root-counter@none": [
            {
                "count": 1,
                "min": 1.0,
                "max": 1.0,
                "sum": 1.0,
                "tags": {
                    "transaction": "/foo",
                    "release": "fun-release@1.0.0",
                    "environment": "not-fun-env",
                },
            }
        ]
    }

    assert t["spans"][0]["_metrics_summary"]["d:my-dist@none"] == [
        {
            "count": 10,
            "min": 0.0,
            "max": 9.0,
            "sum": 45.0,
            "tags": {
                "environment": "not-fun-env",
                "release": "fun-release@1.0.0",
                "transaction": "/foo",
            },
        }
    ]

    assert t["spans"][0]["tags"] == {"a": "b"}
    (timer,) = t["spans"][0]["_metrics_summary"]["d:my-timer-metric@second"]
    assert timer["count"] == 1
    assert timer["max"] == timer["min"] == timer["sum"]
    assert timer["sum"] > 0
    assert timer["tags"] == {
        "a": "b",
        "environment": "not-fun-env",
        "release": "fun-release@1.0.0",
        "transaction": "/foo",
    }


@minimum_python_37_with_gevent
@pytest.mark.forked
@pytest.mark.parametrize(
    "metric_name,metric_unit,expected_name",
    [
        ("first-metric", "nano-second", "first-metric@nanosecond"),
        ("another_metric?", "nano second", "another_metric_@nanosecond"),
        (
            "metric",
            "nanosecond",
            "metric@nanosecond",
        ),
        (
            "my.amaze.metric I guess",
            "nano|\nsecond",
            "my.amaze.metric_I_guess@nanosecond",
        ),
        ("métríc", "nanöseconď", "m_tr_c@nansecon"),
    ],
)
def test_metric_name_normalization(
    sentry_init,
    capture_envelopes,
    metric_name,
    metric_unit,
    expected_name,
    maybe_monkeypatched_threading,
):
    sentry_init(
        _experiments={"enable_metrics": True, "metric_code_locations": False},
    )
    envelopes = capture_envelopes()

    metrics.distribution(metric_name, 1.0, unit=metric_unit)

    sentry_sdk.flush()

    (envelope,) = envelopes

    assert len(envelope.items) == 1
    assert envelope.items[0].headers["type"] == "statsd"

    parsed_metrics = parse_metrics(envelope.items[0].payload.get_bytes())
    assert len(parsed_metrics) == 1

    name = parsed_metrics[0][1]
    assert name == expected_name


@minimum_python_37_with_gevent
@pytest.mark.forked
@pytest.mark.parametrize(
    "metric_tag,expected_tag",
    [
        ({"f-oo|bar": "%$foo/"}, {"f-oobar": "%$foo/"}),
        ({"foo$.$.$bar": "blah{}"}, {"foo..bar": "blah{}"}),
        (
            {"foö-bar": "snöwmän"},
            {"fo-bar": "snöwmän"},
        ),
        ({"route": "GET /foo"}, {"route": "GET /foo"}),
        ({"__bar__": "this | or , that"}, {"__bar__": "this \\u{7c} or \\u{2c} that"}),
        ({"foo/": "hello!\n\r\t\\"}, {"foo/": "hello!\\n\\r\\t\\\\"}),
    ],
)
def test_metric_tag_normalization(
    sentry_init,
    capture_envelopes,
    metric_tag,
    expected_tag,
    maybe_monkeypatched_threading,
):
    sentry_init(
        _experiments={"enable_metrics": True, "metric_code_locations": False},
    )
    envelopes = capture_envelopes()

    metrics.distribution("a", 1.0, tags=metric_tag)

    sentry_sdk.flush()

    (envelope,) = envelopes

    assert len(envelope.items) == 1
    assert envelope.items[0].headers["type"] == "statsd"

    parsed_metrics = parse_metrics(envelope.items[0].payload.get_bytes())
    assert len(parsed_metrics) == 1

    tags = parsed_metrics[0][4]

    expected_tag_key, expected_tag_value = expected_tag.popitem()
    assert expected_tag_key in tags
    assert tags[expected_tag_key] == expected_tag_value


@minimum_python_37_with_gevent
@pytest.mark.forked
def test_before_emit_metric(
    sentry_init, capture_envelopes, maybe_monkeypatched_threading
):
    def before_emit(key, value, unit, tags):
        if key == "removed-metric" or value == 47 or unit == "unsupported":
            return False

        tags["extra"] = "foo"
        del tags["release"]
        # this better be a noop!
        metrics.increment("shitty-recursion")
        return True

    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        _experiments={
            "enable_metrics": True,
            "metric_code_locations": False,
            "before_emit_metric": before_emit,
        },
    )
    envelopes = capture_envelopes()

    metrics.increment("removed-metric", 1.0)
    metrics.increment("another-removed-metric", 47)
    metrics.increment("yet-another-removed-metric", 1.0, unit="unsupported")
    metrics.increment("actual-metric", 1.0)
    sentry_sdk.flush()

    (envelope,) = envelopes

    assert len(envelope.items) == 1
    assert envelope.items[0].headers["type"] == "statsd"
    m = parse_metrics(envelope.items[0].payload.get_bytes())

    assert len(m) == 1
    assert m[0][1] == "actual-metric@none"
    assert m[0][3] == ["1.0"]
    assert m[0][4] == {
        "extra": "foo",
        "environment": "not-fun-env",
    }


@minimum_python_37_with_gevent
@pytest.mark.forked
def test_aggregator_flush(
    sentry_init, capture_envelopes, maybe_monkeypatched_threading
):
    sentry_init(
        release="fun-release@1.0.0",
        environment="not-fun-env",
        _experiments={
            "enable_metrics": True,
        },
    )
    envelopes = capture_envelopes()

    metrics.increment("a-metric", 1.0)
    sentry_sdk.flush()

    assert len(envelopes) == 1
    assert sentry_sdk.get_client().metrics_aggregator.buckets == {}


@minimum_python_37_with_gevent
@pytest.mark.forked
def test_tag_serialization(
    sentry_init, capture_envelopes, maybe_monkeypatched_threading
):
    sentry_init(
        release="fun-release",
        environment="not-fun-env",
        _experiments={"enable_metrics": True, "metric_code_locations": False},
    )
    envelopes = capture_envelopes()

    metrics.increment(
        "counter",
        tags={
            "no-value": None,
            "an-int": 42,
            "a-float": 23.0,
            "a-string": "blah",
            "more-than-one": [1, "zwei", "3.0", None],
        },
    )
    sentry_sdk.flush()

    (envelope,) = envelopes

    assert len(envelope.items) == 1
    assert envelope.items[0].headers["type"] == "statsd"
    m = parse_metrics(envelope.items[0].payload.get_bytes())

    assert len(m) == 1
    assert m[0][4] == {
        "an-int": "42",
        "a-float": "23.0",
        "a-string": "blah",
        "more-than-one": ["1", "3.0", "zwei"],
        "release": "fun-release",
        "environment": "not-fun-env",
    }


@minimum_python_37_with_gevent
@pytest.mark.forked
def test_flush_recursion_protection(
    sentry_init, capture_envelopes, monkeypatch, maybe_monkeypatched_threading
):
    sentry_init(
        release="fun-release",
        environment="not-fun-env",
        _experiments={"enable_metrics": True},
    )
    envelopes = capture_envelopes()
    test_client = sentry_sdk.get_client()

    real_capture_envelope = test_client.transport.capture_envelope

    def bad_capture_envelope(*args, **kwargs):
        metrics.increment("bad-metric")
        return real_capture_envelope(*args, **kwargs)

    monkeypatch.setattr(
        test_client.transport, "capture_envelope", bad_capture_envelope
    )

    metrics.increment("counter")

    # flush twice to see the inner metric
    sentry_sdk.flush()
    sentry_sdk.flush()

    (envelope,) = envelopes
    m = parse_metrics(envelope.items[0].payload.get_bytes())
    assert len(m) == 1
    assert m[0][1] == "counter@none"


@minimum_python_37_with_gevent
@pytest.mark.forked
def test_flush_recursion_protection_background_flush(
    sentry_init, capture_envelopes, monkeypatch, maybe_monkeypatched_threading
):
    monkeypatch.setattr(metrics.MetricsAggregator, "FLUSHER_SLEEP_TIME", 0.01)
    sentry_init(
        release="fun-release",
        environment="not-fun-env",
        _experiments={"enable_metrics": True},
    )
    envelopes = capture_envelopes()
    test_client = sentry_sdk.get_client()

    real_capture_envelope = test_client.transport.capture_envelope

    def bad_capture_envelope(*args, **kwargs):
        metrics.increment("bad-metric")
        return real_capture_envelope(*args, **kwargs)

    monkeypatch.setattr(
        test_client.transport, "capture_envelope", bad_capture_envelope
    )

    metrics.increment("counter")

    # flush via sleep and flag
    sentry_sdk.get_client().metrics_aggregator._force_flush = True
    time.sleep(0.5)

    (envelope,) = envelopes
    m = parse_metrics(envelope.items[0].payload.get_bytes())
    assert len(m) == 1
    assert m[0][1] == "counter@none"


@pytest.mark.skipif(
    not gevent or sys.version_info >= (3, 7),
    reason="Python 3.6 or lower and gevent required",
)
@pytest.mark.forked
def test_disable_metrics_for_old_python_with_gevent(
    sentry_init, capture_envelopes, maybe_monkeypatched_threading
):
    if maybe_monkeypatched_threading != "greenlet":
        pytest.skip("Test specifically for gevent/greenlet")

    sentry_init(
        release="fun-release",
        environment="not-fun-env",
        _experiments={"enable_metrics": True},
    )
    envelopes = capture_envelopes()

    metrics.incr("counter")

    sentry_sdk.flush()

    assert sentry_sdk.get_client().metrics_aggregator is None
    assert not envelopes
sentry-python-2.18.0/tests/test_monitor.py000066400000000000000000000053141471214654000207160ustar00rootroot00000000000000import random
from collections import Counter
from unittest import mock

import sentry_sdk
from sentry_sdk.transport import Transport


class HealthyTestTransport(Transport):
    def capture_envelope(self, _):
        pass

    def is_healthy(self):
        return True


class UnhealthyTestTransport(HealthyTestTransport):
    def is_healthy(self):
        return False


def test_no_monitor_if_disabled(sentry_init):
    sentry_init(
        transport=HealthyTestTransport(),
        enable_backpressure_handling=False,
    )

    assert sentry_sdk.get_client().monitor is None


def test_monitor_if_enabled(sentry_init):
    sentry_init(transport=HealthyTestTransport())

    monitor = sentry_sdk.get_client().monitor
    assert monitor is not None
    assert monitor._thread is None

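    # is_healthy() is expected to lazily start the monitor thread on first
    # use, which is why _thread flips from None to a running thread below.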
    assert monitor.is_healthy() is True
    assert monitor.downsample_factor == 0
    assert monitor._thread is not None
    assert monitor._thread.name == "sentry.monitor"


def test_monitor_unhealthy(sentry_init):
    sentry_init(transport=UnhealthyTestTransport())

    monitor = sentry_sdk.get_client().monitor
    monitor.interval = 0.1

    assert monitor.is_healthy() is True

    for i in range(15):
        monitor.run()
        assert monitor.is_healthy() is False
        assert monitor.downsample_factor == (i + 1 if i < 10 else 10)


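# A hedged sketch of the backpressure arithmetic exercised below: each
# unhealthy check increments downsample_factor (capped at 10), and the
# effective rate is assumed to be traces_sample_rate / 2 ** downsample_factor,
# so a factor of 1 with traces_sample_rate=1.0 yields sample_rate == 0.5.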
def test_transaction_uses_downsampled_rate(
    sentry_init, capture_record_lost_event_calls, monkeypatch
):
    sentry_init(
        traces_sample_rate=1.0,
        transport=UnhealthyTestTransport(),
    )

    record_lost_event_calls = capture_record_lost_event_calls()

    monitor = sentry_sdk.get_client().monitor
    monitor.interval = 0.1

    # make sure rng doesn't sample
    monkeypatch.setattr(random, "random", lambda: 0.9)

    assert monitor.is_healthy() is True
    monitor.run()
    assert monitor.is_healthy() is False
    assert monitor.downsample_factor == 1

    with sentry_sdk.start_transaction(name="foobar") as transaction:
        assert transaction.sampled is False
        assert transaction.sample_rate == 0.5

    assert Counter(record_lost_event_calls) == Counter(
        [
            ("backpressure", "transaction", None, 1),
            ("backpressure", "span", None, 1),
        ]
    )


def test_monitor_no_thread_on_shutdown_no_errors(sentry_init):
    sentry_init(transport=HealthyTestTransport())

    # make it seem like the interpreter is shutting down
    with mock.patch(
        "threading.Thread.start",
        side_effect=RuntimeError("can't create new thread at interpreter shutdown"),
    ):
        monitor = sentry_sdk.get_client().monitor
        assert monitor is not None
        assert monitor._thread is None
        monitor.run()
        assert monitor._thread is None
sentry-python-2.18.0/tests/test_propagationcontext.py000066400000000000000000000045511471214654000231610ustar00rootroot00000000000000from sentry_sdk.tracing_utils import PropagationContext


def test_empty_context():
    ctx = PropagationContext()

    assert ctx.trace_id is not None
    assert len(ctx.trace_id) == 32

    assert ctx.span_id is not None
    assert len(ctx.span_id) == 16

    assert ctx.parent_span_id is None
    assert ctx.parent_sampled is None
    assert ctx.dynamic_sampling_context is None


def test_context_with_values():
    ctx = PropagationContext(
        trace_id="1234567890abcdef1234567890abcdef",
        span_id="1234567890abcdef",
        parent_span_id="abcdef1234567890",
        parent_sampled=True,
        dynamic_sampling_context={
            "foo": "bar",
        },
    )

    assert ctx.trace_id == "1234567890abcdef1234567890abcdef"
    assert ctx.span_id == "1234567890abcdef"
    assert ctx.parent_span_id == "abcdef1234567890"
    assert ctx.parent_sampled
    assert ctx.dynamic_sampling_context == {
        "foo": "bar",
    }


def test_lazy_uuids():
    ctx = PropagationContext()
    assert ctx._trace_id is None
    assert ctx._span_id is None

    assert ctx.trace_id is not None  # this sets _trace_id
    assert ctx._trace_id is not None
    assert ctx._span_id is None

    assert ctx.span_id is not None  # this sets _span_id
    assert ctx._trace_id is not None
    assert ctx._span_id is not None


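def _example_lazy_id_sketch():
    # Illustrative sketch (hypothetical helper, not part of the original
    # suite): PropagationContext defers id generation until first property
    # access, similar to this minimal pattern.
    import uuid

    class LazyIds:
        def __init__(self):
            self._trace_id = None

        @property
        def trace_id(self):
            # generate a 32-char hex id only on first access
            if self._trace_id is None:
                self._trace_id = uuid.uuid4().hex
            return self._trace_id

    return LazyIds().trace_id

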
def test_property_setters():
    ctx = PropagationContext()
    ctx.trace_id = "X234567890abcdef1234567890abcdef"
    ctx.span_id = "X234567890abcdef"

    assert ctx._trace_id == "X234567890abcdef1234567890abcdef"
    assert ctx.trace_id == "X234567890abcdef1234567890abcdef"
    assert ctx._span_id == "X234567890abcdef"
    assert ctx.span_id == "X234567890abcdef"


def test_update():
    ctx = PropagationContext()

    other_data = {
        "trace_id": "Z234567890abcdef1234567890abcdef",
        "parent_span_id": "Z234567890abcdef",
        "parent_sampled": False,
        "foo": "bar",
    }
    ctx.update(other_data)

    assert ctx._trace_id == "Z234567890abcdef1234567890abcdef"
    assert ctx.trace_id == "Z234567890abcdef1234567890abcdef"
    assert ctx._span_id is None  # this will be set lazily
    assert ctx.span_id is not None  # this sets _span_id
    assert ctx._span_id is not None
    assert ctx.parent_span_id == "Z234567890abcdef"
    assert not ctx.parent_sampled
    assert ctx.dynamic_sampling_context is None

    assert not hasattr(ctx, "foo")
sentry-python-2.18.0/tests/test_scope.py000066400000000000000000000660351471214654000203470ustar00rootroot00000000000000import copy
import os
import pytest
from unittest import mock

import sentry_sdk
from sentry_sdk import (
    capture_exception,
    isolation_scope,
    new_scope,
)
from sentry_sdk.client import Client, NonRecordingClient
from sentry_sdk.scope import (
    Scope,
    ScopeType,
    use_isolation_scope,
    use_scope,
    should_send_default_pii,
)


SLOTS_NOT_COPIED = {"client"}
"""__slots__ that are not copied when copying a Scope object."""


def test_copying():
    s1 = Scope()
    s1.fingerprint = {}
    s1.set_tag("foo", "bar")

    s2 = copy.copy(s1)
    assert "foo" in s2._tags

    s1.set_tag("bam", "baz")
    assert "bam" in s1._tags
    assert "bam" not in s2._tags

    assert s1._fingerprint is s2._fingerprint


def test_all_slots_copied():
    scope = Scope()
    scope_copy = copy.copy(scope)

    # Check all attributes are copied
    for attr in set(Scope.__slots__) - SLOTS_NOT_COPIED:
        assert getattr(scope_copy, attr) == getattr(scope, attr)


def test_merging(sentry_init, capture_events):
    sentry_init()

    s = Scope()
    s.set_user({"id": "42"})

    events = capture_events()

    capture_exception(NameError(), scope=s)

    (event,) = events
    assert event["user"] == {"id": "42"}


def test_common_args():
    s = Scope()
    s.update_from_kwargs(
        user={"id": 23},
        level="warning",
        extras={"k": "v"},
        contexts={"os": {"name": "Blafasel"}},
        tags={"x": "y"},
        fingerprint=["foo"],
    )

    s2 = Scope()
    s2.set_extra("foo", "bar")
    s2.set_tag("a", "b")
    s2.set_context("device", {"a": "b"})
    s2.update_from_scope(s)

    assert s._user == {"id": 23}
    assert s._level == "warning"
    assert s._extras == {"k": "v"}
    assert s._contexts == {"os": {"name": "Blafasel"}}
    assert s._tags == {"x": "y"}
    assert s._fingerprint == ["foo"]

    assert s._user == s2._user
    assert s._level == s2._level
    assert s._fingerprint == s2._fingerprint
    assert s2._extras == {"k": "v", "foo": "bar"}
    assert s2._tags == {"a": "b", "x": "y"}
    assert s2._contexts == {"os": {"name": "Blafasel"}, "device": {"a": "b"}}


BAGGAGE_VALUE = (
    "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, "
    "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, "
    "sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;"
)

SENTRY_TRACE_VALUE = "771a43a4192642f0b136d5159a501700-1234567890abcdef-1"


@pytest.mark.parametrize(
    "env,excepted_value",
    [
        (
            {
                "SENTRY_TRACE": SENTRY_TRACE_VALUE,
            },
            {
                "sentry-trace": SENTRY_TRACE_VALUE,
            },
        ),
        (
            {
                "SENTRY_BAGGAGE": BAGGAGE_VALUE,
            },
            {
                "baggage": BAGGAGE_VALUE,
            },
        ),
        (
            {
                "SENTRY_TRACE": SENTRY_TRACE_VALUE,
                "SENTRY_BAGGAGE": BAGGAGE_VALUE,
            },
            {
                "sentry-trace": SENTRY_TRACE_VALUE,
                "baggage": BAGGAGE_VALUE,
            },
        ),
        (
            {
                "SENTRY_USE_ENVIRONMENT": "",
                "SENTRY_TRACE": SENTRY_TRACE_VALUE,
                "SENTRY_BAGGAGE": BAGGAGE_VALUE,
            },
            {
                "sentry-trace": SENTRY_TRACE_VALUE,
                "baggage": BAGGAGE_VALUE,
            },
        ),
        (
            {
                "SENTRY_USE_ENVIRONMENT": "True",
                "SENTRY_TRACE": SENTRY_TRACE_VALUE,
                "SENTRY_BAGGAGE": BAGGAGE_VALUE,
            },
            {
                "sentry-trace": SENTRY_TRACE_VALUE,
                "baggage": BAGGAGE_VALUE,
            },
        ),
        (
            {
                "SENTRY_USE_ENVIRONMENT": "no",
                "SENTRY_TRACE": SENTRY_TRACE_VALUE,
                "SENTRY_BAGGAGE": BAGGAGE_VALUE,
            },
            None,
        ),
        (
            {
                "SENTRY_USE_ENVIRONMENT": "True",
                "MY_OTHER_VALUE": "asdf",
                "SENTRY_RELEASE": "1.0.0",
            },
            None,
        ),
    ],
)
def test_load_trace_data_from_env(env, expected_value):
    new_env = os.environ.copy()
    new_env.update(env)

    with mock.patch.dict(os.environ, new_env):
        s = Scope()
        incoming_trace_data = s._load_trace_data_from_env()
        assert incoming_trace_data == expected_value


def test_scope_client():
    scope = Scope(ty="test_something")
    assert scope._type == "test_something"
    assert scope.client is not None
    assert scope.client.__class__ == NonRecordingClient

    custom_client = Client()
    scope = Scope(ty="test_more", client=custom_client)
    assert scope._type == "test_more"
    assert scope.client is not None
    assert scope.client.__class__ == Client
    assert scope.client == custom_client


def test_get_current_scope():
    scope = Scope.get_current_scope()
    assert scope is not None
    assert scope.__class__ == Scope
    assert scope._type == ScopeType.CURRENT


def test_get_isolation_scope():
    scope = Scope.get_isolation_scope()
    assert scope is not None
    assert scope.__class__ == Scope
    assert scope._type == ScopeType.ISOLATION


def test_get_global_scope():
    scope = Scope.get_global_scope()
    assert scope is not None
    assert scope.__class__ == Scope
    assert scope._type == ScopeType.GLOBAL


def test_get_client():
    client = Scope.get_client()
    assert client is not None
    assert client.__class__ == NonRecordingClient
    assert not client.is_active()


def test_set_client():
    client1 = Client()
    client2 = Client()
    client3 = Client()

    current_scope = Scope.get_current_scope()
    isolation_scope = Scope.get_isolation_scope()
    global_scope = Scope.get_global_scope()

    current_scope.set_client(client1)
    isolation_scope.set_client(client2)
    global_scope.set_client(client3)

    client = Scope.get_client()
    assert client == client1

    current_scope.set_client(None)
    isolation_scope.set_client(client2)
    global_scope.set_client(client3)

    client = Scope.get_client()
    assert client == client2

    current_scope.set_client(None)
    isolation_scope.set_client(None)
    global_scope.set_client(client3)

    client = Scope.get_client()
    assert client == client3
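
    # As the asserts above show, Scope.get_client() resolves the client in
    # precedence order: the current scope first, then the isolation scope,
    # then the global scope.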


def test_fork():
    scope = Scope()
    forked_scope = scope.fork()

    assert scope != forked_scope


def test_get_global_scope_tags():
    global_scope1 = Scope.get_global_scope()
    global_scope2 = Scope.get_global_scope()
    assert global_scope1 == global_scope2
    assert global_scope1.client.__class__ == NonRecordingClient
    assert not global_scope1.client.is_active()
    assert global_scope2.client.__class__ == NonRecordingClient
    assert not global_scope2.client.is_active()

    global_scope1.set_tag("tag1", "value")
    tags_scope1 = global_scope1._tags
    tags_scope2 = global_scope2._tags
    assert tags_scope1 == tags_scope2 == {"tag1": "value"}
    assert global_scope1.client.__class__ == NonRecordingClient
    assert not global_scope1.client.is_active()
    assert global_scope2.client.__class__ == NonRecordingClient
    assert not global_scope2.client.is_active()


def test_get_global_with_scope():
    original_global_scope = Scope.get_global_scope()

    with new_scope() as scope:
        in_with_global_scope = Scope.get_global_scope()

        assert scope is not in_with_global_scope
        assert in_with_global_scope is original_global_scope

    after_with_global_scope = Scope.get_global_scope()
    assert after_with_global_scope is original_global_scope


def test_get_global_with_isolation_scope():
    original_global_scope = Scope.get_global_scope()

    with isolation_scope() as scope:
        in_with_global_scope = Scope.get_global_scope()

        assert scope is not in_with_global_scope
        assert in_with_global_scope is original_global_scope

    after_with_global_scope = Scope.get_global_scope()
    assert after_with_global_scope is original_global_scope


def test_get_isolation_scope_tags():
    isolation_scope1 = Scope.get_isolation_scope()
    isolation_scope2 = Scope.get_isolation_scope()
    assert isolation_scope1 == isolation_scope2
    assert isolation_scope1.client.__class__ == NonRecordingClient
    assert not isolation_scope1.client.is_active()
    assert isolation_scope2.client.__class__ == NonRecordingClient
    assert not isolation_scope2.client.is_active()

    isolation_scope1.set_tag("tag1", "value")
    tags_scope1 = isolation_scope1._tags
    tags_scope2 = isolation_scope2._tags
    assert tags_scope1 == tags_scope2 == {"tag1": "value"}
    assert isolation_scope1.client.__class__ == NonRecordingClient
    assert not isolation_scope1.client.is_active()
    assert isolation_scope2.client.__class__ == NonRecordingClient
    assert not isolation_scope2.client.is_active()


def test_get_current_scope_tags():
    scope1 = Scope.get_current_scope()
    scope2 = Scope.get_current_scope()
    assert id(scope1) == id(scope2)
    assert scope1.client.__class__ == NonRecordingClient
    assert not scope1.client.is_active()
    assert scope2.client.__class__ == NonRecordingClient
    assert not scope2.client.is_active()

    scope1.set_tag("tag1", "value")
    tags_scope1 = scope1._tags
    tags_scope2 = scope2._tags
    assert tags_scope1 == tags_scope2 == {"tag1": "value"}
    assert scope1.client.__class__ == NonRecordingClient
    assert not scope1.client.is_active()
    assert scope2.client.__class__ == NonRecordingClient
    assert not scope2.client.is_active()


def test_with_isolation_scope():
    original_current_scope = Scope.get_current_scope()
    original_isolation_scope = Scope.get_isolation_scope()

    with isolation_scope() as scope:
        assert scope._type == ScopeType.ISOLATION

        in_with_current_scope = Scope.get_current_scope()
        in_with_isolation_scope = Scope.get_isolation_scope()

        assert scope is in_with_isolation_scope
        assert in_with_current_scope is not original_current_scope
        assert in_with_isolation_scope is not original_isolation_scope

    after_with_current_scope = Scope.get_current_scope()
    after_with_isolation_scope = Scope.get_isolation_scope()
    assert after_with_current_scope is original_current_scope
    assert after_with_isolation_scope is original_isolation_scope


def test_with_isolation_scope_data():
    """
    When doing `with isolation_scope()`, the isolation *and* the current scope
    are forked. This prevents data set on the current scope inside the context
    manager from bleeding into the outer current scope.
    """
    isolation_scope_before = Scope.get_isolation_scope()
    current_scope_before = Scope.get_current_scope()

    isolation_scope_before.set_tag("before_isolation_scope", 1)
    current_scope_before.set_tag("before_current_scope", 1)

    with isolation_scope() as scope:
        assert scope._type == ScopeType.ISOLATION

        isolation_scope_in = Scope.get_isolation_scope()
        current_scope_in = Scope.get_current_scope()

        assert isolation_scope_in._tags == {"before_isolation_scope": 1}
        assert current_scope_in._tags == {"before_current_scope": 1}
        assert scope._tags == {"before_isolation_scope": 1}

        scope.set_tag("in_with_scope", 1)

        assert isolation_scope_in._tags == {
            "before_isolation_scope": 1,
            "in_with_scope": 1,
        }
        assert current_scope_in._tags == {"before_current_scope": 1}
        assert scope._tags == {"before_isolation_scope": 1, "in_with_scope": 1}

        isolation_scope_in.set_tag("in_with_isolation_scope", 1)

        assert isolation_scope_in._tags == {
            "before_isolation_scope": 1,
            "in_with_scope": 1,
            "in_with_isolation_scope": 1,
        }
        assert current_scope_in._tags == {"before_current_scope": 1}
        assert scope._tags == {
            "before_isolation_scope": 1,
            "in_with_scope": 1,
            "in_with_isolation_scope": 1,
        }

        current_scope_in.set_tag("in_with_current_scope", 1)

        assert isolation_scope_in._tags == {
            "before_isolation_scope": 1,
            "in_with_scope": 1,
            "in_with_isolation_scope": 1,
        }
        assert current_scope_in._tags == {
            "before_current_scope": 1,
            "in_with_current_scope": 1,
        }
        assert scope._tags == {
            "before_isolation_scope": 1,
            "in_with_scope": 1,
            "in_with_isolation_scope": 1,
        }

    isolation_scope_after = Scope.get_isolation_scope()
    current_scope_after = Scope.get_current_scope()

    isolation_scope_after.set_tag("after_isolation_scope", 1)

    assert isolation_scope_after._tags == {
        "before_isolation_scope": 1,
        "after_isolation_scope": 1,
    }
    assert current_scope_after._tags == {"before_current_scope": 1}

    current_scope_after.set_tag("after_current_scope", 1)

    assert isolation_scope_after._tags == {
        "before_isolation_scope": 1,
        "after_isolation_scope": 1,
    }
    assert current_scope_after._tags == {
        "before_current_scope": 1,
        "after_current_scope": 1,
    }


def test_with_use_isolation_scope():
    original_isolation_scope = Scope.get_isolation_scope()
    original_current_scope = Scope.get_current_scope()
    custom_isolation_scope = Scope()

    with use_isolation_scope(custom_isolation_scope) as scope:
        assert scope._type is None  # our custom scope has no type set

        in_with_isolation_scope = Scope.get_isolation_scope()
        in_with_current_scope = Scope.get_current_scope()

        assert scope is custom_isolation_scope
        assert scope is in_with_isolation_scope
        assert scope is not in_with_current_scope
        assert scope is not original_isolation_scope
        assert scope is not original_current_scope
        assert in_with_isolation_scope is not original_isolation_scope
        assert in_with_current_scope is not original_current_scope

    after_with_current_scope = Scope.get_current_scope()
    after_with_isolation_scope = Scope.get_isolation_scope()

    assert after_with_isolation_scope is original_isolation_scope
    assert after_with_current_scope is original_current_scope
    assert after_with_isolation_scope is not custom_isolation_scope
    assert after_with_current_scope is not custom_isolation_scope


def test_with_use_isolation_scope_data():
    isolation_scope_before = Scope.get_isolation_scope()
    current_scope_before = Scope.get_current_scope()
    custom_isolation_scope = Scope()

    isolation_scope_before.set_tag("before_isolation_scope", 1)
    current_scope_before.set_tag("before_current_scope", 1)
    custom_isolation_scope.set_tag("before_custom_isolation_scope", 1)

    with use_isolation_scope(custom_isolation_scope) as scope:
        assert scope._type is None  # our custom scope has no type set

        isolation_scope_in = Scope.get_isolation_scope()
        current_scope_in = Scope.get_current_scope()

        assert isolation_scope_in._tags == {"before_custom_isolation_scope": 1}
        assert current_scope_in._tags == {"before_current_scope": 1}
        assert scope._tags == {"before_custom_isolation_scope": 1}

        scope.set_tag("in_with_scope", 1)

        assert isolation_scope_in._tags == {
            "before_custom_isolation_scope": 1,
            "in_with_scope": 1,
        }
        assert current_scope_in._tags == {"before_current_scope": 1}
        assert scope._tags == {"before_custom_isolation_scope": 1, "in_with_scope": 1}

        isolation_scope_in.set_tag("in_with_isolation_scope", 1)

        assert isolation_scope_in._tags == {
            "before_custom_isolation_scope": 1,
            "in_with_scope": 1,
            "in_with_isolation_scope": 1,
        }
        assert current_scope_in._tags == {"before_current_scope": 1}
        assert scope._tags == {
            "before_custom_isolation_scope": 1,
            "in_with_scope": 1,
            "in_with_isolation_scope": 1,
        }

        current_scope_in.set_tag("in_with_current_scope", 1)

        assert isolation_scope_in._tags == {
            "before_custom_isolation_scope": 1,
            "in_with_scope": 1,
            "in_with_isolation_scope": 1,
        }
        assert current_scope_in._tags == {
            "before_current_scope": 1,
            "in_with_current_scope": 1,
        }
        assert scope._tags == {
            "before_custom_isolation_scope": 1,
            "in_with_scope": 1,
            "in_with_isolation_scope": 1,
        }

    assert custom_isolation_scope._tags == {
        "before_custom_isolation_scope": 1,
        "in_with_scope": 1,
        "in_with_isolation_scope": 1,
    }
    isolation_scope_after = Scope.get_isolation_scope()
    current_scope_after = Scope.get_current_scope()

    isolation_scope_after.set_tag("after_isolation_scope", 1)

    assert isolation_scope_after._tags == {
        "before_isolation_scope": 1,
        "after_isolation_scope": 1,
    }
    assert current_scope_after._tags == {"before_current_scope": 1}
    assert custom_isolation_scope._tags == {
        "before_custom_isolation_scope": 1,
        "in_with_scope": 1,
        "in_with_isolation_scope": 1,
    }

    current_scope_after.set_tag("after_current_scope", 1)

    assert isolation_scope_after._tags == {
        "before_isolation_scope": 1,
        "after_isolation_scope": 1,
    }
    assert current_scope_after._tags == {
        "before_current_scope": 1,
        "after_current_scope": 1,
    }
    assert custom_isolation_scope._tags == {
        "before_custom_isolation_scope": 1,
        "in_with_scope": 1,
        "in_with_isolation_scope": 1,
    }


def test_with_new_scope():
    original_current_scope = Scope.get_current_scope()
    original_isolation_scope = Scope.get_isolation_scope()

    with new_scope() as scope:
        assert scope._type == ScopeType.CURRENT

        in_with_current_scope = Scope.get_current_scope()
        in_with_isolation_scope = Scope.get_isolation_scope()

        assert scope is in_with_current_scope
        assert in_with_current_scope is not original_current_scope
        assert in_with_isolation_scope is original_isolation_scope

    after_with_current_scope = Scope.get_current_scope()
    after_with_isolation_scope = Scope.get_isolation_scope()
    assert after_with_current_scope is original_current_scope
    assert after_with_isolation_scope is original_isolation_scope


def test_with_new_scope_data():
    """
    When doing `with new_scope()`, the current scope is forked but the isolation
    scope stays untouched.
    """
    isolation_scope_before = Scope.get_isolation_scope()
    current_scope_before = Scope.get_current_scope()

    isolation_scope_before.set_tag("before_isolation_scope", 1)
    current_scope_before.set_tag("before_current_scope", 1)

    with new_scope() as scope:
        assert scope._type == ScopeType.CURRENT

        isolation_scope_in = Scope.get_isolation_scope()
        current_scope_in = Scope.get_current_scope()

        assert isolation_scope_in._tags == {"before_isolation_scope": 1}
        assert current_scope_in._tags == {"before_current_scope": 1}
        assert scope._tags == {"before_current_scope": 1}

        scope.set_tag("in_with_scope", 1)

        assert isolation_scope_in._tags == {"before_isolation_scope": 1}
        assert current_scope_in._tags == {"before_current_scope": 1, "in_with_scope": 1}
        assert scope._tags == {"before_current_scope": 1, "in_with_scope": 1}

        isolation_scope_in.set_tag("in_with_isolation_scope", 1)

        assert isolation_scope_in._tags == {
            "before_isolation_scope": 1,
            "in_with_isolation_scope": 1,
        }
        assert current_scope_in._tags == {"before_current_scope": 1, "in_with_scope": 1}
        assert scope._tags == {"before_current_scope": 1, "in_with_scope": 1}

        current_scope_in.set_tag("in_with_current_scope", 1)

        assert isolation_scope_in._tags == {
            "before_isolation_scope": 1,
            "in_with_isolation_scope": 1,
        }
        assert current_scope_in._tags == {
            "before_current_scope": 1,
            "in_with_scope": 1,
            "in_with_current_scope": 1,
        }
        assert scope._tags == {
            "before_current_scope": 1,
            "in_with_scope": 1,
            "in_with_current_scope": 1,
        }

    isolation_scope_after = Scope.get_isolation_scope()
    current_scope_after = Scope.get_current_scope()

    isolation_scope_after.set_tag("after_isolation_scope", 1)

    assert isolation_scope_after._tags == {
        "before_isolation_scope": 1,
        "in_with_isolation_scope": 1,
        "after_isolation_scope": 1,
    }
    assert current_scope_after._tags == {"before_current_scope": 1}

    current_scope_after.set_tag("after_current_scope", 1)

    assert isolation_scope_after._tags == {
        "before_isolation_scope": 1,
        "in_with_isolation_scope": 1,
        "after_isolation_scope": 1,
    }
    assert current_scope_after._tags == {
        "before_current_scope": 1,
        "after_current_scope": 1,
    }


def test_with_use_scope_data():
    isolation_scope_before = Scope.get_isolation_scope()
    current_scope_before = Scope.get_current_scope()
    custom_current_scope = Scope()

    isolation_scope_before.set_tag("before_isolation_scope", 1)
    current_scope_before.set_tag("before_current_scope", 1)
    custom_current_scope.set_tag("before_custom_current_scope", 1)

    with use_scope(custom_current_scope) as scope:
        assert scope._type is None  # our custom scope has no type set

        isolation_scope_in = Scope.get_isolation_scope()
        current_scope_in = Scope.get_current_scope()

        assert isolation_scope_in._tags == {"before_isolation_scope": 1}
        assert current_scope_in._tags == {"before_custom_current_scope": 1}
        assert scope._tags == {"before_custom_current_scope": 1}

        scope.set_tag("in_with_scope", 1)

        assert isolation_scope_in._tags == {"before_isolation_scope": 1}
        assert current_scope_in._tags == {
            "before_custom_current_scope": 1,
            "in_with_scope": 1,
        }
        assert scope._tags == {"before_custom_current_scope": 1, "in_with_scope": 1}

        isolation_scope_in.set_tag("in_with_isolation_scope", 1)

        assert isolation_scope_in._tags == {
            "before_isolation_scope": 1,
            "in_with_isolation_scope": 1,
        }
        assert current_scope_in._tags == {
            "before_custom_current_scope": 1,
            "in_with_scope": 1,
        }
        assert scope._tags == {"before_custom_current_scope": 1, "in_with_scope": 1}

        current_scope_in.set_tag("in_with_current_scope", 1)

        assert isolation_scope_in._tags == {
            "before_isolation_scope": 1,
            "in_with_isolation_scope": 1,
        }
        assert current_scope_in._tags == {
            "before_custom_current_scope": 1,
            "in_with_scope": 1,
            "in_with_current_scope": 1,
        }
        assert scope._tags == {
            "before_custom_current_scope": 1,
            "in_with_scope": 1,
            "in_with_current_scope": 1,
        }

    assert custom_current_scope._tags == {
        "before_custom_current_scope": 1,
        "in_with_scope": 1,
        "in_with_current_scope": 1,
    }
    isolation_scope_after = Scope.get_isolation_scope()
    current_scope_after = Scope.get_current_scope()

    isolation_scope_after.set_tag("after_isolation_scope", 1)

    assert isolation_scope_after._tags == {
        "before_isolation_scope": 1,
        "after_isolation_scope": 1,
        "in_with_isolation_scope": 1,
    }
    assert current_scope_after._tags == {"before_current_scope": 1}
    assert custom_current_scope._tags == {
        "before_custom_current_scope": 1,
        "in_with_scope": 1,
        "in_with_current_scope": 1,
    }

    current_scope_after.set_tag("after_current_scope", 1)

    assert isolation_scope_after._tags == {
        "before_isolation_scope": 1,
        "in_with_isolation_scope": 1,
        "after_isolation_scope": 1,
    }
    assert current_scope_after._tags == {
        "before_current_scope": 1,
        "after_current_scope": 1,
    }
    assert custom_current_scope._tags == {
        "before_custom_current_scope": 1,
        "in_with_scope": 1,
        "in_with_current_scope": 1,
    }


def test_nested_scopes_with_tags(sentry_init, capture_envelopes):
    sentry_init(traces_sample_rate=1.0)
    envelopes = capture_envelopes()

    with sentry_sdk.isolation_scope() as scope1:
        scope1.set_tag("isolation_scope1", 1)

        with sentry_sdk.new_scope() as scope2:
            scope2.set_tag("current_scope2", 1)

            with sentry_sdk.start_transaction(name="trx") as trx:
                trx.set_tag("trx", 1)

                with sentry_sdk.start_span(op="span1") as span1:
                    span1.set_tag("a", 1)

                    with new_scope() as scope3:
                        scope3.set_tag("current_scope3", 1)

                        with sentry_sdk.start_span(op="span2") as span2:
                            span2.set_tag("b", 1)

    (envelope,) = envelopes
    transaction = envelope.items[0].get_transaction_event()

    assert transaction["tags"] == {"isolation_scope1": 1, "current_scope2": 1, "trx": 1}
    assert transaction["spans"][0]["tags"] == {"a": 1}
    assert transaction["spans"][1]["tags"] == {"b": 1}


def test_should_send_default_pii_true(sentry_init):
    sentry_init(send_default_pii=True)

    assert should_send_default_pii() is True


def test_should_send_default_pii_false(sentry_init):
    sentry_init(send_default_pii=False)

    assert should_send_default_pii() is False


def test_set_tags():
    scope = Scope()
    scope.set_tags({"tag1": "value1", "tag2": "value2"})
    event = scope.apply_to_event({}, {})

    assert event["tags"] == {"tag1": "value1", "tag2": "value2"}, "Setting tags failed"

    scope.set_tags({"tag2": "updated", "tag3": "new"})
    event = scope.apply_to_event({}, {})

    assert event["tags"] == {
        "tag1": "value1",
        "tag2": "updated",
        "tag3": "new",
    }, "Updating tags failed"

    scope.set_tags({})
    event = scope.apply_to_event({}, {})

    assert event["tags"] == {
        "tag1": "value1",
        "tag2": "updated",
        "tag3": "new",
    }, "Updating tags with empty dict changed tags"


def test_last_event_id(sentry_init):
    sentry_init(enable_tracing=True)

    assert Scope.last_event_id() is None

    sentry_sdk.capture_exception(Exception("test"))

    assert Scope.last_event_id() is not None


def test_last_event_id_transaction(sentry_init):
    sentry_init(enable_tracing=True)

    assert Scope.last_event_id() is None

    with sentry_sdk.start_transaction(name="test"):
        pass

    assert Scope.last_event_id() is None, "Transaction should not set last_event_id"


def test_last_event_id_cleared(sentry_init):
    sentry_init(enable_tracing=True)

    # Make sure last_event_id is set
    sentry_sdk.capture_exception(Exception("test"))
    assert Scope.last_event_id() is not None

    # Clearing the isolation scope should clear the last_event_id
    Scope.get_isolation_scope().clear()

    assert Scope.last_event_id() is None, "last_event_id should be cleared"
sentry-python-2.18.0/tests/test_scrubber.py000066400000000000000000000153301471214654000210350ustar00rootroot00000000000000import sys
import logging

from sentry_sdk import capture_exception, capture_event, start_transaction, start_span
from sentry_sdk.utils import event_from_exception
from sentry_sdk.scrubber import EventScrubber
from tests.conftest import ApproxDict


logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)


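# The default EventScrubber denylist filters secret-looking keys (cookies,
# authorization headers, tokens, passwords); PII fields such as ip_address
# are only scrubbed while send_default_pii is disabled, as the tests below
# demonstrate.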
def test_request_scrubbing(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    try:
        1 / 0
    except ZeroDivisionError:
        ev, _hint = event_from_exception(sys.exc_info())

        ev["request"] = {
            "headers": {
                "COOKIE": "secret",
                "authorization": "Bearer bla",
                "ORIGIN": "google.com",
                "ip_address": "127.0.0.1",
            },
            "cookies": {
                "sessionid": "secret",
                "foo": "bar",
            },
            "data": {
                "token": "secret",
                "foo": "bar",
            },
        }

        capture_event(ev)

    (event,) = events

    assert event["request"] == {
        "headers": {
            "COOKIE": "[Filtered]",
            "authorization": "[Filtered]",
            "ORIGIN": "google.com",
            "ip_address": "[Filtered]",
        },
        "cookies": {"sessionid": "[Filtered]", "foo": "bar"},
        "data": {"token": "[Filtered]", "foo": "bar"},
    }

    assert event["_meta"]["request"] == {
        "headers": {
            "COOKIE": {"": {"rem": [["!config", "s"]]}},
            "authorization": {"": {"rem": [["!config", "s"]]}},
            "ip_address": {"": {"rem": [["!config", "s"]]}},
        },
        "cookies": {"sessionid": {"": {"rem": [["!config", "s"]]}}},
        "data": {"token": {"": {"rem": [["!config", "s"]]}}},
    }


def test_ip_address_not_scrubbed_when_pii_enabled(sentry_init, capture_events):
    sentry_init(send_default_pii=True)
    events = capture_events()

    try:
        1 / 0
    except ZeroDivisionError:
        ev, _hint = event_from_exception(sys.exc_info())

        ev["request"] = {"headers": {"COOKIE": "secret", "ip_address": "127.0.0.1"}}

        capture_event(ev)

    (event,) = events

    assert event["request"] == {
        "headers": {"COOKIE": "[Filtered]", "ip_address": "127.0.0.1"}
    }

    assert event["_meta"]["request"] == {
        "headers": {
            "COOKIE": {"": {"rem": [["!config", "s"]]}},
        }
    }


def test_stack_var_scrubbing(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    try:
        password = "supersecret"  # noqa
        api_key = "1231231231"  # noqa
        safe = "keepthis"  # noqa
        1 / 0
    except ZeroDivisionError:
        capture_exception()

    (event,) = events

    frames = event["exception"]["values"][0]["stacktrace"]["frames"]
    (frame,) = frames
    assert frame["vars"]["password"] == "[Filtered]"
    assert frame["vars"]["api_key"] == "[Filtered]"
    assert frame["vars"]["safe"] == "'keepthis'"

    meta = event["_meta"]["exception"]["values"]["0"]["stacktrace"]["frames"]["0"][
        "vars"
    ]
    assert meta == {
        "password": {"": {"rem": [["!config", "s"]]}},
        "api_key": {"": {"rem": [["!config", "s"]]}},
    }


def test_breadcrumb_extra_scrubbing(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    logger.info("bread", extra=dict(foo=42, password="secret"))
    logger.critical("whoops", extra=dict(bar=69, auth="secret"))

    (event,) = events

    assert event["extra"]["bar"] == 69
    assert event["extra"]["auth"] == "[Filtered]"

    assert event["breadcrumbs"]["values"][0]["data"] == {
        "foo": 42,
        "password": "[Filtered]",
    }

    assert event["_meta"]["extra"]["auth"] == {"": {"rem": [["!config", "s"]]}}
    assert event["_meta"]["breadcrumbs"] == {
        "values": {"0": {"data": {"password": {"": {"rem": [["!config", "s"]]}}}}}
    }


def test_span_data_scrubbing(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0)
    events = capture_events()

    with start_transaction(name="hi"):
        with start_span(op="foo", name="bar") as span:
            span.set_data("password", "secret")
            span.set_data("datafoo", "databar")

    (event,) = events
    assert event["spans"][0]["data"] == ApproxDict(
        {"password": "[Filtered]", "datafoo": "databar"}
    )
    assert event["_meta"]["spans"] == {
        "0": {"data": {"password": {"": {"rem": [["!config", "s"]]}}}}
    }


def test_custom_denylist(sentry_init, capture_events):
    sentry_init(
        event_scrubber=EventScrubber(
            denylist=["my_sensitive_var"], pii_denylist=["my_pii_var"]
        )
    )
    events = capture_events()

    try:
        my_sensitive_var = "secret"  # noqa
        my_pii_var = "jane.doe"  # noqa
        safe = "keepthis"  # noqa
        1 / 0
    except ZeroDivisionError:
        capture_exception()

    (event,) = events

    frames = event["exception"]["values"][0]["stacktrace"]["frames"]
    (frame,) = frames
    assert frame["vars"]["my_sensitive_var"] == "[Filtered]"
    assert frame["vars"]["my_pii_var"] == "[Filtered]"
    assert frame["vars"]["safe"] == "'keepthis'"

    meta = event["_meta"]["exception"]["values"]["0"]["stacktrace"]["frames"]["0"][
        "vars"
    ]
    assert meta == {
        "my_sensitive_var": {"": {"rem": [["!config", "s"]]}},
        "my_pii_var": {"": {"rem": [["!config", "s"]]}},
    }


def test_scrubbing_doesnt_affect_local_vars(sentry_init, capture_events):
    sentry_init()
    events = capture_events()

    try:
        password = "cat123"
        1 / 0
    except ZeroDivisionError:
        capture_exception()

    (event,) = events

    frames = event["exception"]["values"][0]["stacktrace"]["frames"]
    (frame,) = frames
    assert frame["vars"]["password"] == "[Filtered]"
    assert password == "cat123"


def test_recursive_event_scrubber(sentry_init, capture_events):
    sentry_init(event_scrubber=EventScrubber(recursive=True))
    events = capture_events()
    complex_structure = {
        "deep": {
            "deeper": [{"deepest": {"password": "my_darkest_secret"}}],
        },
    }

    capture_event({"extra": complex_structure})

    (event,) = events
    assert event["extra"]["deep"]["deeper"][0]["deepest"]["password"] == "'[Filtered]'"


def test_recursive_scrubber_does_not_override_original(sentry_init, capture_events):
    sentry_init(event_scrubber=EventScrubber(recursive=True))
    events = capture_events()

    data = {"csrf": "secret"}
    try:
        raise RuntimeError("An error")
    except Exception:
        capture_exception()

    (event,) = events
    frames = event["exception"]["values"][0]["stacktrace"]["frames"]
    (frame,) = frames
    assert data["csrf"] == "secret"
    assert frame["vars"]["data"]["csrf"] == "[Filtered]"
sentry-python-2.18.0/tests/test_serializer.py000066400000000000000000000121201471214654000213710ustar00rootroot00000000000000import re

import pytest

from sentry_sdk.serializer import MAX_DATABAG_BREADTH, MAX_DATABAG_DEPTH, serialize

try:
    from hypothesis import given
    import hypothesis.strategies as st
except ImportError:
    pass
else:
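    # hypothesis is an optional test dependency; the property-based tests
    # below are only defined when it can be imported.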

    def test_bytes_serialization_decode_many(message_normalizer):
        @given(binary=st.binary(min_size=1))
        def inner(binary):
            result = message_normalizer(binary, should_repr_strings=False)
            assert result == binary.decode("utf-8", "replace")

        inner()

    def test_bytes_serialization_repr_many(message_normalizer):
        @given(binary=st.binary(min_size=1))
        def inner(binary):
            result = message_normalizer(binary, should_repr_strings=True)
            assert result == repr(binary)

        inner()


@pytest.fixture
def message_normalizer(validate_event_schema):
    def inner(message, **kwargs):
        event = serialize({"logentry": {"message": message}}, **kwargs)
        validate_event_schema(event)
        return event["logentry"]["message"]

    return inner


@pytest.fixture
def extra_normalizer(validate_event_schema):
    def inner(extra, **kwargs):
        event = serialize({"extra": {"foo": extra}}, **kwargs)
        validate_event_schema(event)
        return event["extra"]["foo"]

    return inner


@pytest.fixture
def body_normalizer(validate_event_schema):
    def inner(body, **kwargs):
        event = serialize({"request": {"data": body}}, **kwargs)
        validate_event_schema(event)
        return event["request"]["data"]

    return inner


def test_bytes_serialization_decode(message_normalizer):
    binary = b"abc123\x80\xf0\x9f\x8d\x95"
    result = message_normalizer(binary, should_repr_strings=False)
    assert result == "abc123\ufffd\U0001f355"


def test_bytes_serialization_repr(message_normalizer):
    binary = b"abc123\x80\xf0\x9f\x8d\x95"
    result = message_normalizer(binary, should_repr_strings=True)
    assert result == r"b'abc123\x80\xf0\x9f\x8d\x95'"


def test_bytearray_serialization_decode(message_normalizer):
    binary = bytearray(b"abc123\x80\xf0\x9f\x8d\x95")
    result = message_normalizer(binary, should_repr_strings=False)
    assert result == "abc123\ufffd\U0001f355"


def test_bytearray_serialization_repr(message_normalizer):
    binary = bytearray(b"abc123\x80\xf0\x9f\x8d\x95")
    result = message_normalizer(binary, should_repr_strings=True)
    assert result == r"bytearray(b'abc123\x80\xf0\x9f\x8d\x95')"


def test_memoryview_serialization_repr(message_normalizer):
    binary = memoryview(b"abc123\x80\xf0\x9f\x8d\x95")
    result = message_normalizer(binary, should_repr_strings=False)
    assert re.match(r"^<memory at 0x\w+>$", result)


def test_serialize_sets(extra_normalizer):
    result = extra_normalizer({1, 2, 3})
    assert result == [1, 2, 3]


def test_serialize_custom_mapping(extra_normalizer):
    class CustomReprDict(dict):
        def __sentry_repr__(self):
            return "custom!"

    result = extra_normalizer(CustomReprDict(one=1, two=2))
    assert result == "custom!"


def test_custom_mapping_doesnt_mess_with_mock(extra_normalizer):
    """
    Adding the __sentry_repr__ magic method check in the serializer
    shouldn't mess with how mock works. This broke some stuff when we added
    sentry_repr without the dunders.
    """
    mock = pytest.importorskip("unittest.mock")
    m = mock.Mock()
    extra_normalizer(m)
    assert len(m.mock_calls) == 0


def test_custom_repr(extra_normalizer):
    class Foo:
        pass

    def custom_repr(value):
        if isinstance(value, Foo):
            return "custom"
        else:
            return value

    result = extra_normalizer({"foo": Foo(), "string": "abc"}, custom_repr=custom_repr)
    assert result == {"foo": "custom", "string": "abc"}


def test_custom_repr_graceful_fallback_to_safe_repr(extra_normalizer):
    class Foo:
        pass

    def custom_repr(value):
        raise ValueError("oops")

    result = extra_normalizer({"foo": Foo()}, custom_repr=custom_repr)
    assert "Foo object" in result["foo"]


def test_trim_databag_breadth(body_normalizer):
    data = {
        "key{}".format(i): "value{}".format(i) for i in range(MAX_DATABAG_BREADTH + 10)
    }

    result = body_normalizer(data)

    assert len(result) == MAX_DATABAG_BREADTH
    for key, value in result.items():
        assert data.get(key) == value


def test_no_trimming_if_max_request_body_size_is_always(body_normalizer):
    data = {
        "key{}".format(i): "value{}".format(i) for i in range(MAX_DATABAG_BREADTH + 10)
    }
    curr = data
    for _ in range(MAX_DATABAG_DEPTH + 5):
        curr["nested"] = {}
        curr = curr["nested"]

    result = body_normalizer(data, max_request_body_size="always")

    assert result == data


def test_max_value_length_default(body_normalizer):
    data = {"key": "a" * 2000}

    result = body_normalizer(data)

    assert len(result["key"]) == 1024  # fallback max length


def test_max_value_length(body_normalizer):
    data = {"key": "a" * 2000}

    max_value_length = 1800
    result = body_normalizer(data, max_value_length=max_value_length)

    assert len(result["key"]) == max_value_length
sentry-python-2.18.0/tests/test_sessions.py000066400000000000000000000167041471214654000211020ustar00rootroot00000000000000from unittest import mock

import sentry_sdk
from sentry_sdk.sessions import auto_session_tracking, track_session

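# track_session() is the scope-based replacement for the deprecated
# auto_session_tracking() context manager; both variants are exercised below.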

def sorted_aggregates(item):
    aggregates = item["aggregates"]
    aggregates.sort(key=lambda item: (item["started"], item.get("did", "")))
    return aggregates


def test_basic(sentry_init, capture_envelopes):
    sentry_init(release="fun-release", environment="not-fun-env")
    envelopes = capture_envelopes()

    sentry_sdk.get_isolation_scope().start_session()

    try:
        scope = sentry_sdk.get_current_scope()
        scope.set_user({"id": "42"})
        raise Exception("all is wrong")
    except Exception:
        sentry_sdk.capture_exception()

    sentry_sdk.get_isolation_scope().end_session()
    sentry_sdk.flush()

    assert len(envelopes) == 2
    assert envelopes[0].get_event() is not None

    sess = envelopes[1]
    assert len(sess.items) == 1
    sess_event = sess.items[0].payload.json

    assert sess_event["attrs"] == {
        "release": "fun-release",
        "environment": "not-fun-env",
    }
    assert sess_event["did"] == "42"
    assert sess_event["init"]
    assert sess_event["status"] == "exited"
    assert sess_event["errors"] == 1


def test_aggregates(sentry_init, capture_envelopes):
    sentry_init(
        release="fun-release",
        environment="not-fun-env",
    )
    envelopes = capture_envelopes()

    with sentry_sdk.isolation_scope() as scope:
        with track_session(scope, session_mode="request"):
            try:
                scope.set_user({"id": "42"})
                raise Exception("all is wrong")
            except Exception:
                sentry_sdk.capture_exception()

    with sentry_sdk.isolation_scope() as scope:
        with track_session(scope, session_mode="request"):
            pass

    sentry_sdk.get_isolation_scope().start_session(session_mode="request")
    sentry_sdk.get_isolation_scope().end_session()
    sentry_sdk.flush()

    assert len(envelopes) == 2
    assert envelopes[0].get_event() is not None

    sess = envelopes[1]
    assert len(sess.items) == 1
    sess_event = sess.items[0].payload.json
    assert sess_event["attrs"] == {
        "release": "fun-release",
        "environment": "not-fun-env",
    }

    aggregates = sorted_aggregates(sess_event)
    assert len(aggregates) == 1
    assert aggregates[0]["exited"] == 2
    assert aggregates[0]["errored"] == 1


def test_aggregates_deprecated(
    sentry_init, capture_envelopes, suppress_deprecation_warnings
):
    sentry_init(
        release="fun-release",
        environment="not-fun-env",
    )
    envelopes = capture_envelopes()

    with auto_session_tracking(session_mode="request"):
        with sentry_sdk.new_scope() as scope:
            try:
                scope.set_user({"id": "42"})
                raise Exception("all is wrong")
            except Exception:
                sentry_sdk.capture_exception()

    with auto_session_tracking(session_mode="request"):
        pass

    sentry_sdk.get_isolation_scope().start_session(session_mode="request")
    sentry_sdk.get_isolation_scope().end_session()
    sentry_sdk.flush()

    assert len(envelopes) == 2
    assert envelopes[0].get_event() is not None

    sess = envelopes[1]
    assert len(sess.items) == 1
    sess_event = sess.items[0].payload.json
    assert sess_event["attrs"] == {
        "release": "fun-release",
        "environment": "not-fun-env",
    }

    aggregates = sorted_aggregates(sess_event)
    assert len(aggregates) == 1
    assert aggregates[0]["exited"] == 2
    assert aggregates[0]["errored"] == 1


def test_aggregates_explicitly_disabled_session_tracking_request_mode(
    sentry_init, capture_envelopes
):
    sentry_init(
        release="fun-release", environment="not-fun-env", auto_session_tracking=False
    )
    envelopes = capture_envelopes()

    with sentry_sdk.isolation_scope() as scope:
        with track_session(scope, session_mode="request"):
            try:
                raise Exception("all is wrong")
            except Exception:
                sentry_sdk.capture_exception()

    with sentry_sdk.isolation_scope() as scope:
        with track_session(scope, session_mode="request"):
            pass

    sentry_sdk.get_isolation_scope().start_session(session_mode="request")
    sentry_sdk.get_isolation_scope().end_session()
    sentry_sdk.flush()

    sess = envelopes[1]
    assert len(sess.items) == 1
    sess_event = sess.items[0].payload.json

    aggregates = sorted_aggregates(sess_event)
    assert len(aggregates) == 1
    assert aggregates[0]["exited"] == 1
    assert "errored" not in aggregates[0]


def test_aggregates_explicitly_disabled_session_tracking_request_mode_deprecated(
    sentry_init, capture_envelopes, suppress_deprecation_warnings
):
    sentry_init(
        release="fun-release", environment="not-fun-env", auto_session_tracking=False
    )
    envelopes = capture_envelopes()

    with auto_session_tracking(session_mode="request"):
        with sentry_sdk.new_scope():
            try:
                raise Exception("all is wrong")
            except Exception:
                sentry_sdk.capture_exception()

    with auto_session_tracking(session_mode="request"):
        pass

    sentry_sdk.get_isolation_scope().start_session(session_mode="request")
    sentry_sdk.get_isolation_scope().end_session()
    sentry_sdk.flush()

    sess = envelopes[1]
    assert len(sess.items) == 1
    sess_event = sess.items[0].payload.json

    aggregates = sorted_aggregates(sess_event)
    assert len(aggregates) == 1
    assert aggregates[0]["exited"] == 1
    assert "errored" not in aggregates[0]


def test_no_thread_on_shutdown_no_errors(sentry_init):
    sentry_init(
        release="fun-release",
        environment="not-fun-env",
    )

    # make it seem like the interpreter is shutting down
    with mock.patch(
        "threading.Thread.start",
        side_effect=RuntimeError("can't create new thread at interpreter shutdown"),
    ):
        with sentry_sdk.isolation_scope() as scope:
            with track_session(scope, session_mode="request"):
                try:
                    raise Exception("all is wrong")
                except Exception:
                    sentry_sdk.capture_exception()

        with sentry_sdk.isolation_scope() as scope:
            with track_session(scope, session_mode="request"):
                pass

        sentry_sdk.get_isolation_scope().start_session(session_mode="request")
        sentry_sdk.get_isolation_scope().end_session()
        sentry_sdk.flush()

    # If we reach this point without error, the test is successful.


def test_no_thread_on_shutdown_no_errors_deprecated(
    sentry_init, suppress_deprecation_warnings
):
    sentry_init(
        release="fun-release",
        environment="not-fun-env",
    )

    # make it seem like the interpreter is shutting down
    with mock.patch(
        "threading.Thread.start",
        side_effect=RuntimeError("can't create new thread at interpreter shutdown"),
    ):
        with auto_session_tracking(session_mode="request"):
            with sentry_sdk.new_scope():
                try:
                    raise Exception("all is wrong")
                except Exception:
                    sentry_sdk.capture_exception()

        with auto_session_tracking(session_mode="request"):
            pass

        sentry_sdk.get_isolation_scope().start_session(session_mode="request")
        sentry_sdk.get_isolation_scope().end_session()
        sentry_sdk.flush()

    # If we reach this point without error, the test is successful.
sentry-python-2.18.0/tests/test_spotlight.py000066400000000000000000000027251471214654000212470ustar00rootroot00000000000000import pytest

import sentry_sdk


@pytest.fixture
def capture_spotlight_envelopes(monkeypatch):
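    """Patch the Spotlight client's `capture_envelope` so tests can inspect
    the envelopes that would be streamed to Spotlight."""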
    def inner():
        envelopes = []
        test_spotlight = sentry_sdk.get_client().spotlight
        old_capture_envelope = test_spotlight.capture_envelope

        def append_envelope(envelope):
            envelopes.append(envelope)
            return old_capture_envelope(envelope)

        monkeypatch.setattr(test_spotlight, "capture_envelope", append_envelope)
        return envelopes

    return inner


def test_spotlight_off_by_default(sentry_init):
    sentry_init()
    assert sentry_sdk.get_client().spotlight is None


def test_spotlight_default_url(sentry_init):
    sentry_init(spotlight=True)

    spotlight = sentry_sdk.get_client().spotlight
    assert spotlight is not None
    assert spotlight.url == "http://localhost:8969/stream"


def test_spotlight_custom_url(sentry_init):
    sentry_init(spotlight="http://foobar@test.com/132")

    spotlight = sentry_sdk.get_client().spotlight
    assert spotlight is not None
    assert spotlight.url == "http://foobar@test.com/132"


def test_spotlight_envelope(sentry_init, capture_spotlight_envelopes):
    sentry_init(spotlight=True)
    envelopes = capture_spotlight_envelopes()

    try:
        raise ValueError("aha!")
    except Exception:
        sentry_sdk.capture_exception()

    (envelope,) = envelopes
    payload = envelope.items[0].payload.json

    assert payload["exception"]["values"][0]["value"] == "aha!"
sentry-python-2.18.0/tests/test_tracing_utils.py000066400000000000000000000064631471214654000221040ustar00rootroot00000000000000from dataclasses import asdict, dataclass
from typing import Optional, List

from sentry_sdk.tracing_utils import _should_be_included
import pytest


def id_function(val):
    # type: (object) -> Optional[str]
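    # Returning None for other parametrized values makes pytest fall back to
    # its default test-id generation.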
    if isinstance(val, ShouldBeIncludedTestCase):
        return val.id


@dataclass(frozen=True)
class ShouldBeIncludedTestCase:
    id: str
    is_sentry_sdk_frame: bool
    namespace: Optional[str] = None
    in_app_include: Optional[List[str]] = None
    in_app_exclude: Optional[List[str]] = None
    abs_path: Optional[str] = None
    project_root: Optional[str] = None


@pytest.mark.parametrize(
    "test_case, expected",
    [
        (
            ShouldBeIncludedTestCase(
                id="Frame from Sentry SDK",
                is_sentry_sdk_frame=True,
            ),
            False,
        ),
        (
            ShouldBeIncludedTestCase(
                id="Frame from Django installed in virtualenv inside project root",
                is_sentry_sdk_frame=False,
                abs_path="/home/username/some_project/.venv/lib/python3.12/site-packages/django/db/models/sql/compiler",
                project_root="/home/username/some_project",
                namespace="django.db.models.sql.compiler",
                in_app_include=["django"],
            ),
            True,
        ),
        (
            ShouldBeIncludedTestCase(
                id="Frame from project",
                is_sentry_sdk_frame=False,
                abs_path="/home/username/some_project/some_project/__init__.py",
                project_root="/home/username/some_project",
                namespace="some_project",
            ),
            True,
        ),
        (
            ShouldBeIncludedTestCase(
                id="Frame from project module in `in_app_exclude`",
                is_sentry_sdk_frame=False,
                abs_path="/home/username/some_project/some_project/exclude_me/some_module.py",
                project_root="/home/username/some_project",
                namespace="some_project.exclude_me.some_module",
                in_app_exclude=["some_project.exclude_me"],
            ),
            False,
        ),
        (
            ShouldBeIncludedTestCase(
                id="Frame from system-wide installed Django",
                is_sentry_sdk_frame=False,
                abs_path="/usr/lib/python3.12/site-packages/django/db/models/sql/compiler",
                project_root="/home/username/some_project",
                namespace="django.db.models.sql.compiler",
            ),
            False,
        ),
        (
            ShouldBeIncludedTestCase(
                id="Frame from system-wide installed Django with `django` in `in_app_include`",
                is_sentry_sdk_frame=False,
                abs_path="/usr/lib/python3.12/site-packages/django/db/models/sql/compiler",
                project_root="/home/username/some_project",
                namespace="django.db.models.sql.compiler",
                in_app_include=["django"],
            ),
            True,
        ),
    ],
    ids=id_function,
)
def test_should_be_included(test_case, expected):
    # type: (ShouldBeIncludedTestCase, bool) -> None
    """Checking logic, see: https://github.com/getsentry/sentry-python/issues/3312"""
    kwargs = asdict(test_case)
    kwargs.pop("id")
    assert _should_be_included(**kwargs) == expected
sentry-python-2.18.0/tests/test_transport.py000066400000000000000000000577261471214654000213010ustar00rootroot00000000000000import logging
import pickle
import gzip
import io
import os
import socket
import sys
from collections import defaultdict, namedtuple
from datetime import datetime, timedelta, timezone
from unittest import mock

import brotli
import pytest
from pytest_localserver.http import WSGIServer
from werkzeug.wrappers import Request, Response

try:
    import gevent
except ImportError:
    gevent = None

import sentry_sdk
from sentry_sdk import (
    Client,
    add_breadcrumb,
    capture_message,
    isolation_scope,
    get_isolation_scope,
    Hub,
)
from sentry_sdk._compat import PY37, PY38
from sentry_sdk.envelope import Envelope, Item, parse_json
from sentry_sdk.transport import (
    KEEP_ALIVE_SOCKET_OPTIONS,
    _parse_rate_limits,
    HttpTransport,
)
from sentry_sdk.integrations.logging import LoggingIntegration, ignore_logger

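# One record per request received by the CapturingServer below.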
CapturedData = namedtuple("CapturedData", ["path", "event", "envelope", "compressed"])


class CapturingServer(WSGIServer):
    def __init__(self, host="127.0.0.1", port=0, ssl_context=None):
        WSGIServer.__init__(self, host, port, self, ssl_context=ssl_context)
        self.code = 204
        self.headers = {}
        self.captured = []

    def respond_with(self, code=200, headers=None):
        self.code = code
        if headers:
            self.headers = headers

    def clear_captured(self):
        del self.captured[:]

    def __call__(self, environ, start_response):
        """
        This is the WSGI application.
        """
        request = Request(environ)
        event = envelope = None
        content_encoding = request.headers.get("content-encoding")
        if content_encoding == "gzip":
            rdr = gzip.GzipFile(fileobj=io.BytesIO(request.data))
            compressed = True
        elif content_encoding == "br":
            rdr = io.BytesIO(brotli.decompress(request.data))
            compressed = True
        else:
            rdr = io.BytesIO(request.data)
            compressed = False

        if request.mimetype == "application/json":
            event = parse_json(rdr.read())
        else:
            envelope = Envelope.deserialize_from(rdr)

        self.captured.append(
            CapturedData(
                path=request.path,
                event=event,
                envelope=envelope,
                compressed=compressed,
            )
        )

        response = Response(status=self.code)
        response.headers.extend(self.headers)
        return response(environ, start_response)


@pytest.fixture
def capturing_server(request):
    server = CapturingServer()
    server.start()
    request.addfinalizer(server.stop)
    return server


@pytest.fixture
def make_client(request, capturing_server):
    def inner(**kwargs):
        return Client(
            "http://foobar@{}/132".format(capturing_server.url[len("http://") :]),
            **kwargs,
        )

    return inner


def mock_transaction_envelope(span_count):
    # type: (int) -> Envelope
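    # Builds a transaction envelope whose event carries `span_count` mocked spans.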
    event = defaultdict(
        mock.MagicMock,
        type="transaction",
        spans=[mock.MagicMock() for _ in range(span_count)],
    )

    envelope = Envelope()
    envelope.add_transaction(event)

    return envelope


@pytest.mark.forked
@pytest.mark.parametrize("debug", (True, False))
@pytest.mark.parametrize("client_flush_method", ["close", "flush"])
@pytest.mark.parametrize("use_pickle", (True, False))
@pytest.mark.parametrize("compression_level", (0, 9, None))
@pytest.mark.parametrize(
    "compression_algo",
    (
        ("gzip", "br", "", None)
        if PY37 or gevent is None
        else ("gzip", "", None)
    ),
)
@pytest.mark.parametrize("http2", [True, False] if PY38 else [False])
def test_transport_works(
    capturing_server,
    request,
    capsys,
    caplog,
    debug,
    make_client,
    client_flush_method,
    use_pickle,
    compression_level,
    compression_algo,
    http2,
    maybe_monkeypatched_threading,
):
    caplog.set_level(logging.DEBUG)

    experiments = {}
    if compression_level is not None:
        experiments["transport_compression_level"] = compression_level

    if compression_algo is not None:
        experiments["transport_compression_algo"] = compression_algo

    if http2:
        experiments["transport_http2"] = True

    client = make_client(
        debug=debug,
        _experiments=experiments,
    )

    if use_pickle:
        client = pickle.loads(pickle.dumps(client))

    sentry_sdk.get_global_scope().set_client(client)
    request.addfinalizer(lambda: sentry_sdk.get_global_scope().set_client(None))

    add_breadcrumb(
        level="info", message="i like bread", timestamp=datetime.now(timezone.utc)
    )
    capture_message("löl")

    getattr(client, client_flush_method)()

    out, err = capsys.readouterr()
    assert not err and not out
    assert capturing_server.captured
    should_compress = (
        # default is to compress with brotli if available, gzip otherwise
        (compression_level is None)
        or (
            # setting compression level to 0 means don't compress
            compression_level
            > 0
        )
    ) and (
        # if we couldn't resolve to a known algo, we don't compress
        compression_algo
        != ""
    )

    assert capturing_server.captured[0].compressed == should_compress

    assert any("Sending envelope" in record.msg for record in caplog.records) == debug


@pytest.mark.parametrize(
    "num_pools,expected_num_pools",
    (
        (None, 2),
        (2, 2),
        (10, 10),
    ),
)
def test_transport_num_pools(make_client, num_pools, expected_num_pools):
    _experiments = {}
    if num_pools is not None:
        _experiments["transport_num_pools"] = num_pools

    client = make_client(_experiments=_experiments)

    options = client.transport._get_pool_options()
    assert options["num_pools"] == expected_num_pools


@pytest.mark.parametrize(
    "http2", [True, False] if sys.version_info >= (3, 8) else [False]
)
def test_two_way_ssl_authentication(make_client, http2):
    _experiments = {}
    if http2:
        _experiments["transport_http2"] = True

    current_dir = os.path.dirname(__file__)
    cert_file = f"{current_dir}/test.pem"
    key_file = f"{current_dir}/test.key"
    client = make_client(
        cert_file=cert_file,
        key_file=key_file,
        _experiments=_experiments,
    )
    options = client.transport._get_pool_options()

    if http2:
        assert options["ssl_context"] is not None
    else:
        assert options["cert_file"] == cert_file
        assert options["key_file"] == key_file


def test_socket_options(make_client):
    socket_options = [
        (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
        (socket.SOL_TCP, socket.TCP_KEEPINTVL, 10),
        (socket.SOL_TCP, socket.TCP_KEEPCNT, 6),
    ]

    client = make_client(socket_options=socket_options)

    options = client.transport._get_pool_options()
    assert options["socket_options"] == socket_options


def test_keep_alive_true(make_client):
    client = make_client(keep_alive=True)

    options = client.transport._get_pool_options()
    assert options["socket_options"] == KEEP_ALIVE_SOCKET_OPTIONS


def test_keep_alive_on_by_default(make_client):
    client = make_client()
    options = client.transport._get_pool_options()
    assert "socket_options" not in options


@pytest.mark.skipif(not PY38, reason="HTTP2 libraries are only available in py3.8+")
def test_http2_with_https_dsn(make_client):
    client = make_client(_experiments={"transport_http2": True})
    client.transport.parsed_dsn.scheme = "https"
    options = client.transport._get_pool_options()
    assert options["http2"] is True


@pytest.mark.skipif(not PY38, reason="HTTP2 libraries are only available in py3.8+")
def test_no_http2_with_http_dsn(make_client):
    client = make_client(_experiments={"transport_http2": True})
    client.transport.parsed_dsn.scheme = "http"
    options = client.transport._get_pool_options()
    assert options["http2"] is False


def test_socket_options_override_keep_alive(make_client):
    socket_options = [
        (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
        (socket.SOL_TCP, socket.TCP_KEEPINTVL, 10),
        (socket.SOL_TCP, socket.TCP_KEEPCNT, 6),
    ]

    client = make_client(socket_options=socket_options, keep_alive=False)

    options = client.transport._get_pool_options()
    assert options["socket_options"] == socket_options


def test_socket_options_merge_with_keep_alive(make_client):
    socket_options = [
        (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 42),
        (socket.SOL_TCP, socket.TCP_KEEPINTVL, 42),
    ]

    client = make_client(socket_options=socket_options, keep_alive=True)

    options = client.transport._get_pool_options()
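    # socket.TCP_KEEPIDLE is not available on every platform (e.g. macOS),
    # hence the AttributeError fallback below.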
    try:
        assert options["socket_options"] == [
            (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 42),
            (socket.SOL_TCP, socket.TCP_KEEPINTVL, 42),
            (socket.SOL_TCP, socket.TCP_KEEPIDLE, 45),
            (socket.SOL_TCP, socket.TCP_KEEPCNT, 6),
        ]
    except AttributeError:
        assert options["socket_options"] == [
            (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 42),
            (socket.SOL_TCP, socket.TCP_KEEPINTVL, 42),
            (socket.SOL_TCP, socket.TCP_KEEPCNT, 6),
        ]


def test_socket_options_override_defaults(make_client):
    # If socket_options are set to [], this doesn't mean the user doesn't want
    # any custom socket_options, but rather that they want to disable the urllib3
    # socket option defaults, so we need to set this and not ignore it.
    client = make_client(socket_options=[])

    options = client.transport._get_pool_options()
    assert options["socket_options"] == []


def test_transport_infinite_loop(capturing_server, request, make_client):
    client = make_client(
        debug=True,
        # Make sure we cannot create events from our own logging
        integrations=[LoggingIntegration(event_level=logging.DEBUG)],
    )

    # For reasons unknown, the "werkzeug" logger emits an INFO log when the
    # message "hi" is sent, which would create an infinite loop.
    # We ignore that logger to break the loop, while still testing that our
    # own log messages (sent from loggers in `_IGNORED_LOGGERS`) do not lead
    # to an infinite loop.
    ignore_logger("werkzeug")

    sentry_sdk.get_global_scope().set_client(client)
    with isolation_scope():
        capture_message("hi")
        client.flush()

    assert len(capturing_server.captured) == 1


def test_transport_no_thread_on_shutdown_no_errors(capturing_server, make_client):
    client = make_client()

    # make it seem like the interpreter is shutting down
    with mock.patch(
        "threading.Thread.start",
        side_effect=RuntimeError("can't create new thread at interpreter shutdown"),
    ):
        sentry_sdk.get_global_scope().set_client(client)
        with isolation_scope():
            capture_message("hi")

    # nothing exploded but also no events can be sent anymore
    assert len(capturing_server.captured) == 0


NOW = datetime(2014, 6, 2)


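# Entries in the X-Sentry-Rate-Limits header have the form
# "<retry_after_seconds>:<categories, ';'-separated>:<scope>[:<reason>[:<namespaces>]]",
# separated by commas, as exercised by the cases below.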
@pytest.mark.parametrize(
    "input,expected",
    [
        # Invalid rate limits
        ("", {}),
        ("invalid", {}),
        (",,,", {}),
        (
            "42::organization, invalid, 4711:foobar;transaction;security:project",
            {
                None: NOW + timedelta(seconds=42),
                "transaction": NOW + timedelta(seconds=4711),
                "security": NOW + timedelta(seconds=4711),
                # Unknown data categories
                "foobar": NOW + timedelta(seconds=4711),
            },
        ),
        (
            "4711:foobar;;transaction:organization",
            {
                "transaction": NOW + timedelta(seconds=4711),
                # Unknown data categories
                "foobar": NOW + timedelta(seconds=4711),
                "": NOW + timedelta(seconds=4711),
            },
        ),
    ],
)
def test_parse_rate_limits(input, expected):
    assert dict(_parse_rate_limits(input, now=NOW)) == expected


def test_simple_rate_limits(capturing_server, make_client):
    client = make_client()
    capturing_server.respond_with(code=429, headers={"Retry-After": "4"})

    client.capture_event({"type": "transaction"})
    client.flush()

    assert len(capturing_server.captured) == 1
    assert capturing_server.captured[0].path == "/api/132/envelope/"
    capturing_server.clear_captured()

    assert set(client.transport._disabled_until) == set([None])

    client.capture_event({"type": "transaction"})
    client.capture_event({"type": "event"})
    client.flush()

    assert not capturing_server.captured


@pytest.mark.parametrize("response_code", [200, 429])
def test_data_category_limits(
    capturing_server, response_code, make_client, monkeypatch
):
    client = make_client(send_client_reports=False)

    captured_outcomes = []

    def record_lost_event(reason, data_category=None, item=None):
        if data_category is None:
            data_category = item.data_category
        return captured_outcomes.append((reason, data_category))

    monkeypatch.setattr(client.transport, "record_lost_event", record_lost_event)

    capturing_server.respond_with(
        code=response_code,
        headers={"X-Sentry-Rate-Limits": "4711:transaction:organization"},
    )

    client.capture_event({"type": "transaction"})
    client.flush()

    assert len(capturing_server.captured) == 1
    assert capturing_server.captured[0].path == "/api/132/envelope/"
    capturing_server.clear_captured()

    assert set(client.transport._disabled_until) == set(["transaction"])

    client.capture_event({"type": "transaction"})
    client.capture_event({"type": "transaction"})
    client.flush()

    assert not capturing_server.captured

    client.capture_event({"type": "event"})
    client.flush()

    assert len(capturing_server.captured) == 1
    assert capturing_server.captured[0].path == "/api/132/envelope/"

    assert captured_outcomes == [
        ("ratelimit_backoff", "transaction"),
        ("ratelimit_backoff", "transaction"),
    ]


@pytest.mark.parametrize("response_code", [200, 429])
def test_data_category_limits_reporting(
    capturing_server, response_code, make_client, monkeypatch
):
    client = make_client(send_client_reports=True)

    capturing_server.respond_with(
        code=response_code,
        headers={
            "X-Sentry-Rate-Limits": "4711:transaction:organization, 4711:attachment:organization"
        },
    )

    outcomes_enabled = False
    real_fetch = client.transport._fetch_pending_client_report

    def intercepting_fetch(*args, **kwargs):
        if outcomes_enabled:
            return real_fetch(*args, **kwargs)

    monkeypatch.setattr(
        client.transport, "_fetch_pending_client_report", intercepting_fetch
    )
    # Run worker submissions synchronously so threading doesn't make the
    # order of events hard to track.
    monkeypatch.setattr(client.transport._worker, "submit", lambda x: x() or True)

    client.capture_event({"type": "transaction"})
    client.flush()

    assert len(capturing_server.captured) == 1
    assert capturing_server.captured[0].path == "/api/132/envelope/"
    capturing_server.clear_captured()

    assert set(client.transport._disabled_until) == set(["attachment", "transaction"])

    client.capture_event({"type": "transaction"})
    client.capture_event({"type": "transaction"})
    capturing_server.clear_captured()

    # flush out the events but don't flush the client reports
    client.flush()
    client.transport._last_client_report_sent = 0
    outcomes_enabled = True

    scope = get_isolation_scope()
    scope.add_attachment(bytes=b"Hello World", filename="hello.txt")
    client.capture_event({"type": "error"}, scope=scope)
    client.flush()

    # This goes out with an extra envelope because it's flushed after the last
    # item that is normally in the queue. Amusingly, this means the envelope
    # that caused its own over-quota report (an error with an attachment)
    # includes that outcome, since the report is still pending when it is sent.
    assert len(capturing_server.captured) == 1
    envelope = capturing_server.captured[0].envelope
    assert envelope.items[0].type == "event"
    assert envelope.items[1].type == "client_report"
    report = parse_json(envelope.items[1].get_bytes())

    discarded_events = report["discarded_events"]

    assert len(discarded_events) == 3
    assert {
        "category": "transaction",
        "reason": "ratelimit_backoff",
        "quantity": 2,
    } in discarded_events
    assert {
        "category": "span",
        "reason": "ratelimit_backoff",
        "quantity": 2,
    } in discarded_events
    assert {
        "category": "attachment",
        "reason": "ratelimit_backoff",
        "quantity": 11,
    } in discarded_events

    capturing_server.clear_captured()

    # Now capture another rate-limited transaction (which gets dropped) and a
    # normal error event.
    client.capture_event({"type": "transaction"})
    client.capture_event({"type": "error", "release": "foo"})
    client.flush()

    assert len(capturing_server.captured) == 2

    assert len(capturing_server.captured[0].envelope.items) == 1
    event = capturing_server.captured[0].envelope.items[0].get_event()
    assert event["type"] == "error"
    assert event["release"] == "foo"

    envelope = capturing_server.captured[1].envelope
    assert envelope.items[0].type == "client_report"
    report = parse_json(envelope.items[0].get_bytes())

    discarded_events = report["discarded_events"]
    assert len(discarded_events) == 2
    assert {
        "category": "transaction",
        "reason": "ratelimit_backoff",
        "quantity": 1,
    } in discarded_events
    assert {
        "category": "span",
        "reason": "ratelimit_backoff",
        "quantity": 1,
    } in discarded_events


@pytest.mark.parametrize("response_code", [200, 429])
def test_complex_limits_without_data_category(
    capturing_server, response_code, make_client
):
    client = make_client()
    capturing_server.respond_with(
        code=response_code,
        headers={"X-Sentry-Rate-Limits": "4711::organization"},
    )

    client.capture_event({"type": "transaction"})
    client.flush()

    assert len(capturing_server.captured) == 1
    assert capturing_server.captured[0].path == "/api/132/envelope/"
    capturing_server.clear_captured()

    assert set(client.transport._disabled_until) == set([None])

    client.capture_event({"type": "transaction"})
    client.capture_event({"type": "transaction"})
    client.capture_event({"type": "event"})
    client.flush()

    assert len(capturing_server.captured) == 0


@pytest.mark.parametrize("response_code", [200, 429])
def test_metric_bucket_limits(capturing_server, response_code, make_client):
    client = make_client()
    capturing_server.respond_with(
        code=response_code,
        headers={
            "X-Sentry-Rate-Limits": "4711:metric_bucket:organization:quota_exceeded:custom"
        },
    )

    envelope = Envelope()
    envelope.add_item(Item(payload=b"{}", type="statsd"))
    client.transport.capture_envelope(envelope)
    client.flush()

    assert len(capturing_server.captured) == 1
    assert capturing_server.captured[0].path == "/api/132/envelope/"
    capturing_server.clear_captured()

    assert set(client.transport._disabled_until) == set(["metric_bucket"])

    client.transport.capture_envelope(envelope)
    client.capture_event({"type": "transaction"})
    client.flush()

    assert len(capturing_server.captured) == 2

    envelope = capturing_server.captured[0].envelope
    assert envelope.items[0].type == "transaction"
    envelope = capturing_server.captured[1].envelope
    assert envelope.items[0].type == "client_report"
    report = parse_json(envelope.items[0].get_bytes())
    assert report["discarded_events"] == [
        {"category": "metric_bucket", "reason": "ratelimit_backoff", "quantity": 1},
    ]


@pytest.mark.parametrize("response_code", [200, 429])
def test_metric_bucket_limits_with_namespace(
    capturing_server, response_code, make_client
):
    client = make_client()
    capturing_server.respond_with(
        code=response_code,
        headers={
            "X-Sentry-Rate-Limits": "4711:metric_bucket:organization:quota_exceeded:foo"
        },
    )

    envelope = Envelope()
    envelope.add_item(Item(payload=b"{}", type="statsd"))
    client.transport.capture_envelope(envelope)
    client.flush()

    assert len(capturing_server.captured) == 1
    assert capturing_server.captured[0].path == "/api/132/envelope/"
    capturing_server.clear_captured()

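    # The limit is scoped to the "foo" namespace only, so nothing is disabled
    # for the default namespace and the statsd item still goes through.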
    assert set(client.transport._disabled_until) == set([])

    client.transport.capture_envelope(envelope)
    client.capture_event({"type": "transaction"})
    client.flush()

    assert len(capturing_server.captured) == 2

    envelope = capturing_server.captured[0].envelope
    assert envelope.items[0].type == "statsd"
    envelope = capturing_server.captured[1].envelope
    assert envelope.items[0].type == "transaction"


@pytest.mark.parametrize("response_code", [200, 429])
def test_metric_bucket_limits_with_all_namespaces(
    capturing_server, response_code, make_client
):
    client = make_client()
    capturing_server.respond_with(
        code=response_code,
        headers={
            "X-Sentry-Rate-Limits": "4711:metric_bucket:organization:quota_exceeded"
        },
    )

    envelope = Envelope()
    envelope.add_item(Item(payload=b"{}", type="statsd"))
    client.transport.capture_envelope(envelope)
    client.flush()

    assert len(capturing_server.captured) == 1
    assert capturing_server.captured[0].path == "/api/132/envelope/"
    capturing_server.clear_captured()

    assert set(client.transport._disabled_until) == set(["metric_bucket"])

    client.transport.capture_envelope(envelope)
    client.capture_event({"type": "transaction"})
    client.flush()

    assert len(capturing_server.captured) == 2

    envelope = capturing_server.captured[0].envelope
    assert envelope.items[0].type == "transaction"
    envelope = capturing_server.captured[1].envelope
    assert envelope.items[0].type == "client_report"
    report = parse_json(envelope.items[0].get_bytes())
    assert report["discarded_events"] == [
        {"category": "metric_bucket", "reason": "ratelimit_backoff", "quantity": 1},
    ]


def test_hub_cls_backwards_compat():
    class TestCustomHubClass(Hub):
        pass

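    # A defaultdict stands in for the options dict: any option the transport
    # reads that isn't set explicitly resolves to None.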
    transport = HttpTransport(
        defaultdict(lambda: None, {"dsn": "https://123abc@example.com/123"})
    )

    with pytest.deprecated_call():
        assert transport.hub_cls is Hub

    with pytest.deprecated_call():
        transport.hub_cls = TestCustomHubClass

    with pytest.deprecated_call():
        assert transport.hub_cls is TestCustomHubClass


@pytest.mark.parametrize("quantity", (1, 2, 10))
def test_record_lost_event_quantity(capturing_server, make_client, quantity):
    client = make_client()
    transport = client.transport

    transport.record_lost_event(reason="test", data_category="span", quantity=quantity)
    client.flush()

    (captured,) = capturing_server.captured  # Should only be one envelope
    envelope = captured.envelope
    (item,) = envelope.items  # Envelope should only have one item

    assert item.type == "client_report"

    report = parse_json(item.get_bytes())

    assert report["discarded_events"] == [
        {"category": "span", "reason": "test", "quantity": quantity}
    ]


@pytest.mark.parametrize("span_count", (0, 1, 2, 10))
def test_record_lost_event_transaction_item(capturing_server, make_client, span_count):
    client = make_client()
    transport = client.transport

    envelope = mock_transaction_envelope(span_count)
    (transaction_item,) = envelope.items

    transport.record_lost_event(reason="test", item=transaction_item)
    client.flush()

    (captured,) = capturing_server.captured  # Should only be one envelope
    envelope = captured.envelope
    (item,) = envelope.items  # Envelope should only have one item

    assert item.type == "client_report"

    report = parse_json(item.get_bytes())
    discarded_events = report["discarded_events"]

    assert len(discarded_events) == 2

    assert {
        "category": "transaction",
        "reason": "test",
        "quantity": 1,
    } in discarded_events

    assert {
        "category": "span",
        "reason": "test",
        "quantity": span_count + 1,
    } in discarded_events
sentry-python-2.18.0/tests/test_types.py000066400000000000000000000013701471214654000203710ustar00rootroot00000000000000import sys

import pytest
from sentry_sdk.types import Event, Hint


@pytest.mark.skipif(
    sys.version_info < (3, 10),
    reason="Type hinting with `|` is available in Python 3.10+",
)
def test_event_or_none_runtime():
    """
    Ensures that the `Event` type's runtime value supports the `|` operation with `None`.
    This test is needed to ensure that using an `Event | None` type hint (e.g. for
    `before_send`'s return value) does not raise a TypeError at runtime.
    """
    Event | None


@pytest.mark.skipif(
    sys.version_info < (3, 10),
    reason="Type hinting with `|` is available in Python 3.10+",
)
def test_hint_or_none_runtime():
    """
    Analogue to `test_event_or_none_runtime`, but for the `Hint` type.
    """
    Hint | None
sentry-python-2.18.0/tests/test_utils.py000066400000000000000000000703041471214654000203700ustar00rootroot00000000000000import threading
import re
import sys
from datetime import timedelta, datetime, timezone
from unittest import mock

import pytest

import sentry_sdk
from sentry_sdk.integrations import Integration
from sentry_sdk._queue import Queue
from sentry_sdk.utils import (
    Components,
    Dsn,
    datetime_from_isoformat,
    env_to_bool,
    format_timestamp,
    get_current_thread_meta,
    get_default_release,
    get_error_message,
    get_git_revision,
    is_valid_sample_rate,
    logger,
    match_regex_list,
    parse_url,
    parse_version,
    safe_str,
    sanitize_url,
    serialize_frame,
    is_sentry_url,
    _get_installed_modules,
    _generate_installed_modules,
    ensure_integration_enabled,
)


class TestIntegration(Integration):
    """
    Integration used for testing the `ensure_integration_enabled` decorator.
    """

    identifier = "test"
    setup_once = mock.MagicMock()


try:
    import gevent
except ImportError:
    gevent = None


def _normalize_distribution_name(name):
    # type: (str) -> str
    """Normalize distribution name according to PEP-0503.

    See:
    https://peps.python.org/pep-0503/#normalized-names
    for more details.
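
    For example, "Foo_Bar.baz" normalizes to "foo-bar-baz".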
    """
    return re.sub(r"[-_.]+", "-", name).lower()


@pytest.mark.parametrize(
    ("input_str", "expected_output"),
    (
        (
            "2021-01-01T00:00:00.000000Z",
            datetime(2021, 1, 1, tzinfo=timezone.utc),
        ),  # UTC time
        (
            "2021-01-01T00:00:00.000000",
            datetime(2021, 1, 1).astimezone(timezone.utc),
        ),  # No TZ -- assume local but convert to UTC
        (
            "2021-01-01T00:00:00Z",
            datetime(2021, 1, 1, tzinfo=timezone.utc),
        ),  # UTC - No milliseconds
        (
            "2021-01-01T00:00:00.000000+00:00",
            datetime(2021, 1, 1, tzinfo=timezone.utc),
        ),
        (
            "2021-01-01T00:00:00.000000-00:00",
            datetime(2021, 1, 1, tzinfo=timezone.utc),
        ),
        (
            "2021-01-01T00:00:00.000000+0000",
            datetime(2021, 1, 1, tzinfo=timezone.utc),
        ),
        (
            "2021-01-01T00:00:00.000000-0000",
            datetime(2021, 1, 1, tzinfo=timezone.utc),
        ),
        (
            "2020-12-31T00:00:00.000000+02:00",
            datetime(2020, 12, 31, tzinfo=timezone(timedelta(hours=2))),
        ),  # UTC+2 time
        (
            "2020-12-31T00:00:00.000000-0200",
            datetime(2020, 12, 31, tzinfo=timezone(timedelta(hours=-2))),
        ),  # UTC-2 time
        (
            "2020-12-31T00:00:00-0200",
            datetime(2020, 12, 31, tzinfo=timezone(timedelta(hours=-2))),
        ),  # UTC-2 time - no milliseconds
    ),
)
def test_datetime_from_isoformat(input_str, expected_output):
    assert datetime_from_isoformat(input_str) == expected_output, input_str


@pytest.mark.parametrize(
    "env_var_value,strict,expected",
    [
        (None, True, None),
        (None, False, False),
        ("", True, None),
        ("", False, False),
        ("t", True, True),
        ("T", True, True),
        ("t", False, True),
        ("T", False, True),
        ("y", True, True),
        ("Y", True, True),
        ("y", False, True),
        ("Y", False, True),
        ("1", True, True),
        ("1", False, True),
        ("True", True, True),
        ("True", False, True),
        ("true", True, True),
        ("true", False, True),
        ("tRuE", True, True),
        ("tRuE", False, True),
        ("Yes", True, True),
        ("Yes", False, True),
        ("yes", True, True),
        ("yes", False, True),
        ("yEs", True, True),
        ("yEs", False, True),
        ("On", True, True),
        ("On", False, True),
        ("on", True, True),
        ("on", False, True),
        ("oN", True, True),
        ("oN", False, True),
        ("f", True, False),
        ("f", False, False),
        ("n", True, False),
        ("N", True, False),
        ("n", False, False),
        ("N", False, False),
        ("0", True, False),
        ("0", False, False),
        ("False", True, False),
        ("False", False, False),
        ("false", True, False),
        ("false", False, False),
        ("FaLsE", True, False),
        ("FaLsE", False, False),
        ("No", True, False),
        ("No", False, False),
        ("no", True, False),
        ("no", False, False),
        ("nO", True, False),
        ("nO", False, False),
        ("Off", True, False),
        ("Off", False, False),
        ("off", True, False),
        ("off", False, False),
        ("oFf", True, False),
        ("oFf", False, False),
        ("xxx", True, None),
        ("xxx", False, True),
    ],
)
def test_env_to_bool(env_var_value, strict, expected):
    assert (
        env_to_bool(env_var_value, strict=strict) == expected
    ), f"Value: {env_var_value}, strict: {strict}"


@pytest.mark.parametrize(
    ("url", "expected_result"),
    [
        ("http://localhost:8000", "http://localhost:8000"),
        ("http://example.com", "http://example.com"),
        ("https://example.com", "https://example.com"),
        (
            "example.com?token=abc&sessionid=123&save=true",
            "example.com?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
        ),
        (
            "http://example.com?token=abc&sessionid=123&save=true",
            "http://example.com?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
        ),
        (
            "https://example.com?token=abc&sessionid=123&save=true",
            "https://example.com?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
        ),
        (
            "http://localhost:8000/?token=abc&sessionid=123&save=true",
            "http://localhost:8000/?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
        ),
        (
            "ftp://username:password@ftp.example.com:9876/bla/blub#foo",
            "ftp://[Filtered]:[Filtered]@ftp.example.com:9876/bla/blub#foo",
        ),
        (
            "https://username:password@example.com/bla/blub?token=abc&sessionid=123&save=true#fragment",
            "https://[Filtered]:[Filtered]@example.com/bla/blub?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]#fragment",
        ),
        ("bla/blub/foo", "bla/blub/foo"),
        ("/bla/blub/foo/", "/bla/blub/foo/"),
        (
            "bla/blub/foo?token=abc&sessionid=123&save=true",
            "bla/blub/foo?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
        ),
        (
            "/bla/blub/foo/?token=abc&sessionid=123&save=true",
            "/bla/blub/foo/?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
        ),
    ],
)
def test_sanitize_url(url, expected_result):
    assert sanitize_url(url) == expected_result


@pytest.mark.parametrize(
    ("url", "expected_result"),
    [
        (
            "http://localhost:8000",
            Components(
                scheme="http", netloc="localhost:8000", path="", query="", fragment=""
            ),
        ),
        (
            "http://example.com",
            Components(
                scheme="http", netloc="example.com", path="", query="", fragment=""
            ),
        ),
        (
            "https://example.com",
            Components(
                scheme="https", netloc="example.com", path="", query="", fragment=""
            ),
        ),
        (
            "example.com?token=abc&sessionid=123&save=true",
            Components(
                scheme="",
                netloc="",
                path="example.com",
                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
                fragment="",
            ),
        ),
        (
            "http://example.com?token=abc&sessionid=123&save=true",
            Components(
                scheme="http",
                netloc="example.com",
                path="",
                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
                fragment="",
            ),
        ),
        (
            "https://example.com?token=abc&sessionid=123&save=true",
            Components(
                scheme="https",
                netloc="example.com",
                path="",
                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
                fragment="",
            ),
        ),
        (
            "http://localhost:8000/?token=abc&sessionid=123&save=true",
            Components(
                scheme="http",
                netloc="localhost:8000",
                path="/",
                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
                fragment="",
            ),
        ),
        (
            "ftp://username:password@ftp.example.com:9876/bla/blub#foo",
            Components(
                scheme="ftp",
                netloc="[Filtered]:[Filtered]@ftp.example.com:9876",
                path="/bla/blub",
                query="",
                fragment="foo",
            ),
        ),
        (
            "https://username:password@example.com/bla/blub?token=abc&sessionid=123&save=true#fragment",
            Components(
                scheme="https",
                netloc="[Filtered]:[Filtered]@example.com",
                path="/bla/blub",
                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
                fragment="fragment",
            ),
        ),
        (
            "bla/blub/foo",
            Components(
                scheme="", netloc="", path="bla/blub/foo", query="", fragment=""
            ),
        ),
        (
            "bla/blub/foo?token=abc&sessionid=123&save=true",
            Components(
                scheme="",
                netloc="",
                path="bla/blub/foo",
                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
                fragment="",
            ),
        ),
        (
            "/bla/blub/foo/?token=abc&sessionid=123&save=true",
            Components(
                scheme="",
                netloc="",
                path="/bla/blub/foo/",
                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
                fragment="",
            ),
        ),
    ],
)
def test_sanitize_url_and_split(url, expected_result):
    sanitized_url = sanitize_url(url, split=True)

    assert sanitized_url.scheme == expected_result.scheme
    assert sanitized_url.netloc == expected_result.netloc
    assert sanitized_url.query == expected_result.query
    assert sanitized_url.path == expected_result.path
    assert sanitized_url.fragment == expected_result.fragment


@pytest.mark.parametrize(
    ("url", "sanitize", "expected_url", "expected_query", "expected_fragment"),
    [
        # Test with sanitize=True
        (
            "https://example.com",
            True,
            "https://example.com",
            "",
            "",
        ),
        (
            "example.com?token=abc&sessionid=123&save=true",
            True,
            "example.com",
            "token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
            "",
        ),
        (
            "https://example.com?token=abc&sessionid=123&save=true",
            True,
            "https://example.com",
            "token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
            "",
        ),
        (
            "https://username:password@example.com/bla/blub?token=abc&sessionid=123&save=true#fragment",
            True,
            "https://[Filtered]:[Filtered]@example.com/bla/blub",
            "token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
            "fragment",
        ),
        (
            "bla/blub/foo",
            True,
            "bla/blub/foo",
            "",
            "",
        ),
        (
            "/bla/blub/foo/#baz",
            True,
            "/bla/blub/foo/",
            "",
            "baz",
        ),
        (
            "bla/blub/foo?token=abc&sessionid=123&save=true",
            True,
            "bla/blub/foo",
            "token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
            "",
        ),
        (
            "/bla/blub/foo/?token=abc&sessionid=123&save=true",
            True,
            "/bla/blub/foo/",
            "token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
            "",
        ),
        # Test with sanitize=False
        (
            "https://example.com",
            False,
            "https://example.com",
            "",
            "",
        ),
        (
            "example.com?token=abc&sessionid=123&save=true",
            False,
            "example.com",
            "token=abc&sessionid=123&save=true",
            "",
        ),
        (
            "https://example.com?token=abc&sessionid=123&save=true",
            False,
            "https://example.com",
            "token=abc&sessionid=123&save=true",
            "",
        ),
        (
            "https://username:password@example.com/bla/blub?token=abc&sessionid=123&save=true#fragment",
            False,
            "https://[Filtered]:[Filtered]@example.com/bla/blub",
            "token=abc&sessionid=123&save=true",
            "fragment",
        ),
        (
            "bla/blub/foo",
            False,
            "bla/blub/foo",
            "",
            "",
        ),
        (
            "/bla/blub/foo/#baz",
            False,
            "/bla/blub/foo/",
            "",
            "baz",
        ),
        (
            "bla/blub/foo?token=abc&sessionid=123&save=true",
            False,
            "bla/blub/foo",
            "token=abc&sessionid=123&save=true",
            "",
        ),
        (
            "/bla/blub/foo/?token=abc&sessionid=123&save=true",
            False,
            "/bla/blub/foo/",
            "token=abc&sessionid=123&save=true",
            "",
        ),
    ],
)
def test_parse_url(url, sanitize, expected_url, expected_query, expected_fragment):
    assert parse_url(url, sanitize=sanitize).url == expected_url
    assert parse_url(url, sanitize=sanitize).fragment == expected_fragment
    assert parse_url(url, sanitize=sanitize).query == expected_query


@pytest.mark.parametrize(
    "rate",
    [0.0, 0.1231, 1.0, True, False],
)
def test_accepts_valid_sample_rate(rate):
    with mock.patch.object(logger, "warning", mock.Mock()):
        result = is_valid_sample_rate(rate, source="Testing")
        assert logger.warning.called is False
        assert result is True


@pytest.mark.parametrize(
    "rate",
    [
        "dogs are great",  # wrong type
        (0, 1),  # wrong type
        {"Maisey": "Charllie"},  # wrong type
        [True, True],  # wrong type
        {0.2012},  # wrong type
        float("NaN"),  # wrong type
        None,  # wrong type
        -1.121,  # wrong value
        1.231,  # wrong value
    ],
)
def test_warns_on_invalid_sample_rate(rate, StringContaining):  # noqa: N803
    with mock.patch.object(logger, "warning", mock.Mock()):
        result = is_valid_sample_rate(rate, source="Testing")
        logger.warning.assert_any_call(StringContaining("Given sample rate is invalid"))
        assert result is False


@pytest.mark.parametrize(
    "include_source_context",
    [True, False],
)
def test_include_source_context_when_serializing_frame(include_source_context):
    frame = sys._getframe()
    result = serialize_frame(frame, include_source_context=include_source_context)

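    # The source-context keys should be present iff include_source_context is True.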
    assert include_source_context == ("pre_context" in result)
    assert include_source_context == ("context_line" in result)
    assert include_source_context == ("post_context" in result)


@pytest.mark.parametrize(
    "item,regex_list,expected_result",
    [
        ["", [], False],
        [None, [], False],
        ["", None, False],
        [None, None, False],
        ["some-string", [], False],
        ["some-string", None, False],
        ["some-string", ["some-string"], True],
        ["some-string", ["some"], False],
        ["some-string", ["some$"], False],  # same as above
        ["some-string", ["some.*"], True],
        ["some-string", ["Some"], False],  # we do case sensitive matching
        ["some-string", [".*string$"], True],
    ],
)
def test_match_regex_list(item, regex_list, expected_result):
    assert match_regex_list(item, regex_list) == expected_result


@pytest.mark.parametrize(
    "version,expected_result",
    [
        ["3.5.15", (3, 5, 15)],
        ["2.0.9", (2, 0, 9)],
        ["2.0.0", (2, 0, 0)],
        ["0.6.0", (0, 6, 0)],
        ["2.0.0.post1", (2, 0, 0)],
        ["2.0.0rc3", (2, 0, 0)],
        ["2.0.0rc2", (2, 0, 0)],
        ["2.0.0rc1", (2, 0, 0)],
        ["2.0.0b4", (2, 0, 0)],
        ["2.0.0b3", (2, 0, 0)],
        ["2.0.0b2", (2, 0, 0)],
        ["2.0.0b1", (2, 0, 0)],
        ["0.6beta3", (0, 6)],
        ["0.6beta2", (0, 6)],
        ["0.6beta1", (0, 6)],
        ["0.4.2b", (0, 4, 2)],
        ["0.4.2a", (0, 4, 2)],
        ["0.0.1", (0, 0, 1)],
        ["0.0.0", (0, 0, 0)],
        ["1", (1,)],
        ["1.0", (1, 0)],
        ["1.0.0", (1, 0, 0)],
        [" 1.0.0 ", (1, 0, 0)],
        ["  1.0.0   ", (1, 0, 0)],
        ["x1.0.0", None],
        ["1.0.0x", None],
        ["x1.0.0x", None],
    ],
)
def test_parse_version(version, expected_result):
    assert parse_version(version) == expected_result


@pytest.fixture
def mock_client_with_dsn_netloc():
    """
    Returns a mocked Client with a DSN netloc of "abcd1234.ingest.sentry.io".
    """
    mock_client = mock.Mock(spec=sentry_sdk.Client)
    mock_client.transport = mock.Mock(spec=sentry_sdk.Transport)
    mock_client.transport.parsed_dsn = mock.Mock(spec=Dsn)

    mock_client.transport.parsed_dsn.netloc = "abcd1234.ingest.sentry.io"

    return mock_client


@pytest.mark.parametrize(
    ["test_url", "is_sentry_url_expected"],
    [
        ["https://asdf@abcd1234.ingest.sentry.io/123456789", True],
        ["https://asdf@abcd1234.ingest.notsentry.io/123456789", False],
    ],
)
def test_is_sentry_url(
    test_url, is_sentry_url_expected, mock_client_with_dsn_netloc
):
    ret_val = is_sentry_url(mock_client_with_dsn_netloc, test_url)

    assert ret_val == is_sentry_url_expected


def test_is_sentry_url_no_client():
    test_url = "https://asdf@abcd1234.ingest.sentry.io/123456789"

    ret_val = is_sentry_url(None, test_url)

    assert not ret_val


@pytest.mark.parametrize(
    "error,expected_result",
    [
        ["", lambda x: safe_str(x)],
        ["some-string", lambda _: "some-string"],
    ],
)
def test_get_error_message(error, expected_result):
    with pytest.raises(BaseException) as exc_value:
        exc_value.message = error
        raise Exception
    assert get_error_message(exc_value) == expected_result(exc_value)

    with pytest.raises(BaseException) as exc_value:
        exc_value.detail = error
        raise Exception
    assert get_error_message(exc_value) == expected_result(exc_value)


def test_installed_modules():
    try:
        from importlib.metadata import distributions, version

        importlib_available = True
    except ImportError:
        importlib_available = False

    try:
        import pkg_resources

        pkg_resources_available = True
    except ImportError:
        pkg_resources_available = False

    installed_distributions = {
        _normalize_distribution_name(dist): version
        for dist, version in _generate_installed_modules()
    }

    if importlib_available:
        importlib_distributions = {
            _normalize_distribution_name(dist.metadata["Name"]): version(
                dist.metadata["Name"]
            )
            for dist in distributions()
            if dist.metadata["Name"] is not None
            and version(dist.metadata["Name"]) is not None
        }
        assert installed_distributions == importlib_distributions

    elif pkg_resources_available:
        pkg_resources_distributions = {
            _normalize_distribution_name(dist.key): dist.version
            for dist in pkg_resources.working_set
        }
        assert installed_distributions == pkg_resources_distributions
    else:
        pytest.fail("Neither importlib nor pkg_resources is available")


def test_installed_modules_caching():
    mock_generate_installed_modules = mock.Mock()
    mock_generate_installed_modules.return_value = {"package": "1.0.0"}
    with mock.patch("sentry_sdk.utils._installed_modules", None):
        with mock.patch(
            "sentry_sdk.utils._generate_installed_modules",
            mock_generate_installed_modules,
        ):
            _get_installed_modules()
            assert mock_generate_installed_modules.called
            mock_generate_installed_modules.reset_mock()

            _get_installed_modules()
            mock_generate_installed_modules.assert_not_called()


def test_devnull_inaccessible():
    with mock.patch("sentry_sdk.utils.open", side_effect=OSError("oh no")):
        revision = get_git_revision()

    assert revision is None


def test_devnull_not_found():
    with mock.patch("sentry_sdk.utils.open", side_effect=FileNotFoundError("oh no")):
        revision = get_git_revision()

    assert revision is None


def test_default_release():
    release = get_default_release()
    assert release is not None


def test_default_release_empty_string():
    with mock.patch("sentry_sdk.utils.get_git_revision", return_value=""):
        release = get_default_release()

    assert release is None


def test_ensure_integration_enabled_integration_enabled(sentry_init):
    def original_function():
        return "original"

    def function_to_patch():
        return "patched"

    sentry_init(integrations=[TestIntegration()])

    # Test the decorator by applying to function_to_patch
    patched_function = ensure_integration_enabled(TestIntegration, original_function)(
        function_to_patch
    )

    assert patched_function() == "patched"
    assert patched_function.__name__ == "original_function"


def test_ensure_integration_enabled_integration_disabled(sentry_init):
    def original_function():
        return "original"

    def function_to_patch():
        return "patched"

    sentry_init(integrations=[])  # TestIntegration is disabled

    # Test the decorator by applying to function_to_patch
    patched_function = ensure_integration_enabled(TestIntegration, original_function)(
        function_to_patch
    )

    assert patched_function() == "original"
    assert patched_function.__name__ == "original_function"


def test_ensure_integration_enabled_no_original_function_enabled(sentry_init):
    shared_variable = "original"

    def function_to_patch():
        nonlocal shared_variable
        shared_variable = "patched"

    sentry_init(integrations=[TestIntegration()])

    # Test the decorator by applying to function_to_patch
    patched_function = ensure_integration_enabled(TestIntegration)(function_to_patch)
    patched_function()

    assert shared_variable == "patched"
    assert patched_function.__name__ == "function_to_patch"


def test_ensure_integration_enabled_no_original_function_disabled(sentry_init):
    shared_variable = "original"

    def function_to_patch():
        nonlocal shared_variable
        shared_variable = "patched"

    sentry_init(integrations=[])

    # Test the decorator by applying to function_to_patch
    patched_function = ensure_integration_enabled(TestIntegration)(function_to_patch)
    patched_function()

    assert shared_variable == "original"
    assert patched_function.__name__ == "function_to_patch"


@pytest.mark.parametrize(
    "delta,expected_milliseconds",
    [
        [timedelta(milliseconds=132), 132.0],
        [timedelta(hours=1, milliseconds=132), float(60 * 60 * 1000 + 132)],
        [timedelta(days=10), float(10 * 24 * 60 * 60 * 1000)],
        [timedelta(microseconds=100), 0.1],
    ],
)
def test_duration_in_milliseconds(delta, expected_milliseconds):
    assert delta / timedelta(milliseconds=1) == expected_milliseconds
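
# Why the parametrized cases above hold: Python supports true division of two
# timedeltas, so `delta / timedelta(milliseconds=1)` converts any delta to a
# float number of milliseconds without manual unit arithmetic.
def _demo_timedelta_division():
    # 1 second + 500 microseconds is exactly 1000.5 milliseconds
    assert timedelta(seconds=1, microseconds=500) / timedelta(milliseconds=1) == 1000.5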


def test_get_current_thread_meta_explicit_thread():
    results = Queue(maxsize=1)

    def target1():
        pass

    def target2():
        results.put(get_current_thread_meta(thread1))

    thread1 = threading.Thread(target=target1)
    thread1.start()

    thread2 = threading.Thread(target=target2)
    thread2.start()

    thread2.join()
    thread1.join()

    assert (thread1.ident, thread1.name) == results.get(timeout=1)


def test_get_current_thread_meta_bad_explicit_thread():
    thread = "fake thread"

    main_thread = threading.main_thread()

    assert (main_thread.ident, main_thread.name) == get_current_thread_meta(thread)


@pytest.mark.skipif(gevent is None, reason="gevent not enabled")
def test_get_current_thread_meta_gevent_in_thread():
    results = Queue(maxsize=1)

    def target():
        with mock.patch("sentry_sdk.utils.is_gevent", side_effect=[True]):
            job = gevent.spawn(get_current_thread_meta)
            job.join()
            results.put(job.value)

    thread = threading.Thread(target=target)
    thread.start()
    thread.join()
    assert (thread.ident, None) == results.get(timeout=1)


@pytest.mark.skipif(gevent is None, reason="gevent not enabled")
def test_get_current_thread_meta_gevent_in_thread_failed_to_get_hub():
    results = Queue(maxsize=1)

    def target():
        with mock.patch("sentry_sdk.utils.is_gevent", side_effect=[True]):
            with mock.patch(
                "sentry_sdk.utils.get_gevent_hub", side_effect=["fake gevent hub"]
            ):
                job = gevent.spawn(get_current_thread_meta)
                job.join()
                results.put(job.value)

    thread = threading.Thread(target=target)
    thread.start()
    thread.join()
    assert (thread.ident, thread.name) == results.get(timeout=1)


def test_get_current_thread_meta_running_thread():
    results = Queue(maxsize=1)

    def target():
        results.put(get_current_thread_meta())

    thread = threading.Thread(target=target)
    thread.start()
    thread.join()
    assert (thread.ident, thread.name) == results.get(timeout=1)


def test_get_current_thread_meta_bad_running_thread():
    results = Queue(maxsize=1)

    def target():
        with mock.patch("threading.current_thread", side_effect=["fake thread"]):
            results.put(get_current_thread_meta())

    thread = threading.Thread(target=target)
    thread.start()
    thread.join()

    main_thread = threading.main_thread()
    assert (main_thread.ident, main_thread.name) == results.get(timeout=1)


def test_get_current_thread_meta_main_thread():
    results = Queue(maxsize=1)

    def target():
        # mock that somehow the current thread doesn't exist
        with mock.patch("threading.current_thread", side_effect=[None]):
            results.put(get_current_thread_meta())

    main_thread = threading.main_thread()

    thread = threading.Thread(target=target)
    thread.start()
    thread.join()
    assert (main_thread.ident, main_thread.name) == results.get(timeout=1)


def test_get_current_thread_meta_failed_to_get_main_thread():
    results = Queue(maxsize=1)

    def target():
        with mock.patch("threading.current_thread", side_effect=["fake thread"]):
            with mock.patch("threading.current_thread", side_effect=["fake thread"]):
                results.put(get_current_thread_meta())

    main_thread = threading.main_thread()

    thread = threading.Thread(target=target)
    thread.start()
    thread.join()
    assert (main_thread.ident, main_thread.name) == results.get(timeout=1)


@pytest.mark.parametrize(
    ("datetime_object", "expected_output"),
    (
        (
            datetime(2021, 1, 1, tzinfo=timezone.utc),
            "2021-01-01T00:00:00.000000Z",
        ),  # UTC time
        (
            datetime(2021, 1, 1, tzinfo=timezone(timedelta(hours=2))),
            "2020-12-31T22:00:00.000000Z",
        ),  # UTC+2 time
        (
            datetime(2021, 1, 1, tzinfo=timezone(timedelta(hours=-7))),
            "2021-01-01T07:00:00.000000Z",
        ),  # UTC-7 time
        (
            datetime(2021, 2, 3, 4, 56, 7, 890123, tzinfo=timezone.utc),
            "2021-02-03T04:56:07.890123Z",
        ),  # UTC time all non-zero fields
    ),
)
def test_format_timestamp(datetime_object, expected_output):
    formatted = format_timestamp(datetime_object)

    assert formatted == expected_output
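
# A hedged sketch of the normalization implied by the cases above: aware
# datetimes are converted to UTC and rendered as RFC 3339 with microsecond
# precision and a "Z" suffix. Illustrative only, not the SDK's actual
# `format_timestamp` implementation.
def _demo_format_timestamp(datetime_object):
    return datetime_object.astimezone(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S.%fZ")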


def test_format_timestamp_naive():
    datetime_object = datetime(2021, 1, 1)
    timestamp_regex = r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6}Z"

    # Ensure that some timestamp is returned, without error. We currently treat these as local time, but this is an
    # implementation detail which we should not assert here.
    assert re.fullmatch(timestamp_regex, format_timestamp(datetime_object))
sentry-python-2.18.0/tests/tracing/000077500000000000000000000000001471214654000172425ustar00rootroot00000000000000sentry-python-2.18.0/tests/tracing/test_baggage.py000066400000000000000000000046631471214654000222410ustar00rootroot00000000000000from sentry_sdk.tracing_utils import Baggage


def test_third_party_baggage():
    header = "other-vendor-value-1=foo;bar;baz, other-vendor-value-2=foo;bar;"
    baggage = Baggage.from_incoming_header(header)

    assert baggage.mutable
    assert baggage.sentry_items == {}
    assert (
        baggage.third_party_items
        == "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;"
    )

    assert baggage.dynamic_sampling_context() == {}
    assert baggage.serialize() == ""
    assert (
        baggage.serialize(include_third_party=True)
        == "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;"
    )
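
# A rough sketch of the partitioning behavior checked above, assuming the
# header is a comma-separated list of `key=value` items and Sentry items are
# identified by a `sentry-` key prefix. This illustrative helper skips the
# percent-decoding that the real parser performs.
def _demo_split_baggage(header):
    sentry_items, third_party = {}, []
    for item in header.split(","):
        key, _, value = item.strip().partition("=")
        if key.startswith("sentry-"):
            sentry_items[key[len("sentry-"):]] = value
        else:
            third_party.append(item.strip())
    return sentry_items, ",".join(third_party)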


def test_mixed_baggage():
    header = (
        "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, "
        "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, "
        "sentry-user_id=Am%C3%A9lie, sentry-foo=bar, other-vendor-value-2=foo;bar;"
    )

    baggage = Baggage.from_incoming_header(header)

    assert not baggage.mutable

    assert baggage.sentry_items == {
        "public_key": "49d0f7386ad645858ae85020e393bef3",
        "trace_id": "771a43a4192642f0b136d5159a501700",
        "user_id": "Amélie",
        "sample_rate": "0.01337",
        "foo": "bar",
    }

    assert (
        baggage.third_party_items
        == "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;"
    )

    assert baggage.dynamic_sampling_context() == {
        "public_key": "49d0f7386ad645858ae85020e393bef3",
        "trace_id": "771a43a4192642f0b136d5159a501700",
        "user_id": "Amélie",
        "sample_rate": "0.01337",
        "foo": "bar",
    }

    assert baggage.serialize() == (
        "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
        "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
        "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie,"
        "sentry-foo=bar"
    )

    assert baggage.serialize(include_third_party=True) == (
        "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
        "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
        "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie,sentry-foo=bar,"
        "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;"
    )
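
# The serialized baggage above percent-encodes non-ASCII values, which is why
# "Amélie" appears on the wire as "Am%C3%A9lie". A quick standard-library
# illustration (the SDK may apply its own quoting rules):
def _demo_baggage_value_encoding():
    from urllib.parse import quote, unquote

    assert quote("Amélie") == "Am%C3%A9lie"
    assert unquote("Am%C3%A9lie") == "Amélie"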


def test_malformed_baggage():
    header = ","

    baggage = Baggage.from_incoming_header(header)

    assert baggage.sentry_items == {}
    assert baggage.third_party_items == ""
    assert baggage.mutable
sentry-python-2.18.0/tests/tracing/test_decorator.py000066400000000000000000000071531471214654000226430ustar00rootroot00000000000000import inspect
from unittest import mock

import pytest

from sentry_sdk.tracing import trace
from sentry_sdk.tracing_utils import start_child_span_decorator
from sentry_sdk.utils import logger
from tests.conftest import patch_start_tracing_child


def my_example_function():
    return "return_of_sync_function"


async def my_async_example_function():
    return "return_of_async_function"


@pytest.mark.forked
def test_trace_decorator():
    with patch_start_tracing_child() as fake_start_child:
        result = my_example_function()
        fake_start_child.assert_not_called()
        assert result == "return_of_sync_function"

        result2 = start_child_span_decorator(my_example_function)()
        fake_start_child.assert_called_once_with(
            op="function", name="test_decorator.my_example_function"
        )
        assert result2 == "return_of_sync_function"


def test_trace_decorator_no_trx():
    with patch_start_tracing_child(fake_transaction_is_none=True):
        with mock.patch.object(logger, "debug", mock.Mock()) as fake_debug:
            result = my_example_function()
            fake_debug.assert_not_called()
            assert result == "return_of_sync_function"

            result2 = start_child_span_decorator(my_example_function)()
            fake_debug.assert_called_once_with(
                "Cannot create a child span for %s. "
                "Please start a Sentry transaction before calling this function.",
                "test_decorator.my_example_function",
            )
            assert result2 == "return_of_sync_function"
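
# A minimal sketch of what a child-span decorator like
# `start_child_span_decorator` conceptually does for sync functions. The real
# implementation also supports async functions and logs a debug message when
# no transaction is active; this is an illustration, not the SDK's code.
def _demo_trace_decorator(func):
    import functools

    import sentry_sdk

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        with sentry_sdk.start_span(op="function", name=func.__qualname__):
            return func(*args, **kwargs)

    return wrapper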


@pytest.mark.forked
@pytest.mark.asyncio
async def test_trace_decorator_async():
    with patch_start_tracing_child() as fake_start_child:
        result = await my_async_example_function()
        fake_start_child.assert_not_called()
        assert result == "return_of_async_function"

        result2 = await start_child_span_decorator(my_async_example_function)()
        fake_start_child.assert_called_once_with(
            op="function",
            name="test_decorator.my_async_example_function",
        )
        assert result2 == "return_of_async_function"


@pytest.mark.asyncio
async def test_trace_decorator_async_no_trx():
    with patch_start_tracing_child(fake_transaction_is_none=True):
        with mock.patch.object(logger, "debug", mock.Mock()) as fake_debug:
            result = await my_async_example_function()
            fake_debug.assert_not_called()
            assert result == "return_of_async_function"

            result2 = await start_child_span_decorator(my_async_example_function)()
            fake_debug.assert_called_once_with(
                "Cannot create a child span for %s. "
                "Please start a Sentry transaction before calling this function.",
                "test_decorator.my_async_example_function",
            )
            assert result2 == "return_of_async_function"


def test_functions_to_trace_signature_unchanged_sync(sentry_init):
    sentry_init(
        traces_sample_rate=1.0,
    )

    def _some_function(a, b, c):
        pass

    @trace
    def _some_function_traced(a, b, c):
        pass

    assert inspect.getcallargs(_some_function, 1, 2, 3) == inspect.getcallargs(
        _some_function_traced, 1, 2, 3
    )


@pytest.mark.asyncio
async def test_functions_to_trace_signature_unchanged_async(sentry_init):
    sentry_init(
        traces_sample_rate=1.0,
    )

    async def _some_function(a, b, c):
        pass

    @trace
    async def _some_function_traced(a, b, c):
        pass

    assert inspect.getcallargs(_some_function, 1, 2, 3) == inspect.getcallargs(
        _some_function_traced, 1, 2, 3
    )
sentry-python-2.18.0/tests/tracing/test_deprecated.py000066400000000000000000000035771471214654000227670ustar00rootroot00000000000000import warnings

import pytest

import sentry_sdk
import sentry_sdk.tracing
from sentry_sdk import start_span

from sentry_sdk.tracing import Span


@pytest.mark.skip(reason="This deprecated feature has been removed in SDK 2.0.")
def test_start_span_to_start_transaction(sentry_init, capture_events):
    # XXX: this only exists for backwards compatibility with code before
    # Transaction / start_transaction were introduced.
    sentry_init(traces_sample_rate=1.0)
    events = capture_events()

    with start_span(transaction="/1/"):
        pass

    with start_span(Span(transaction="/2/")):
        pass

    assert len(events) == 2
    assert events[0]["transaction"] == "/1/"
    assert events[1]["transaction"] == "/2/"


@pytest.mark.parametrize(
    "parameter_value_getter",
    # Use lambda to avoid Hub deprecation warning here (will suppress it in the test)
    (lambda: sentry_sdk.Hub(), lambda: sentry_sdk.Scope()),
)
def test_passing_hub_parameter_to_transaction_finish(
    suppress_deprecation_warnings, parameter_value_getter
):
    parameter_value = parameter_value_getter()
    transaction = sentry_sdk.tracing.Transaction()
    with pytest.warns(DeprecationWarning):
        transaction.finish(hub=parameter_value)


def test_passing_hub_object_to_scope_transaction_finish(suppress_deprecation_warnings):
    transaction = sentry_sdk.tracing.Transaction()

    # Do not move the following line under the `with` statement. Otherwise, the Hub.__init__ deprecation
    # warning will be confused with the transaction.finish deprecation warning that we are testing.
    hub = sentry_sdk.Hub()

    with pytest.warns(DeprecationWarning):
        transaction.finish(hub)


def test_no_warnings_scope_to_transaction_finish():
    transaction = sentry_sdk.tracing.Transaction()
    with warnings.catch_warnings():
        warnings.simplefilter("error")
        transaction.finish(sentry_sdk.Scope())
sentry-python-2.18.0/tests/tracing/test_http_headers.py000066400000000000000000000032771471214654000233360ustar00rootroot00000000000000from unittest import mock

import pytest

from sentry_sdk.tracing import Transaction
from sentry_sdk.tracing_utils import extract_sentrytrace_data


@pytest.mark.parametrize("sampled", [True, False, None])
def test_to_traceparent(sampled):
    transaction = Transaction(
        name="/interactions/other-dogs/new-dog",
        op="greeting.sniff",
        trace_id="12312012123120121231201212312012",
        sampled=sampled,
    )

    traceparent = transaction.to_traceparent()

    parts = traceparent.split("-")
    assert parts[0] == "12312012123120121231201212312012"  # trace_id
    assert parts[1] == transaction.span_id  # parent_span_id
    if sampled is None:
        assert len(parts) == 2
    else:
        assert parts[2] == "1" if sampled is True else "0"  # sampled
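
# A sketch of the construction being decomposed above: a traceparent is
# `trace_id-span_id`, plus an optional trailing sampled flag. Illustrative
# helper, not the SDK's `to_traceparent`.
def _demo_build_traceparent(trace_id, span_id, sampled=None):
    parts = [trace_id, span_id]
    if sampled is not None:
        parts.append("1" if sampled else "0")
    return "-".join(parts)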


@pytest.mark.parametrize("sampling_decision", [True, False])
def test_sentrytrace_extraction(sampling_decision):
    sentrytrace_header = "12312012123120121231201212312012-0415201309082013-{}".format(
        1 if sampling_decision is True else 0
    )
    assert extract_sentrytrace_data(sentrytrace_header) == {
        "trace_id": "12312012123120121231201212312012",
        "parent_span_id": "0415201309082013",
        "parent_sampled": sampling_decision,
    }
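
# A hedged sketch of sentry-trace parsing, assuming the documented
# `<trace_id>-<span_id>[-<sampled>]` wire format: 32 hex characters, 16 hex
# characters, and an optional "0"/"1" flag. This mirrors, but is not, the
# SDK's `extract_sentrytrace_data`.
def _demo_parse_sentrytrace(header):
    import re

    match = re.fullmatch(r"([0-9a-f]{32})-([0-9a-f]{16})(?:-([01]))?", header)
    if match is None:
        return None
    trace_id, span_id, sampled = match.groups()
    return trace_id, span_id, None if sampled is None else sampled == "1"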


def test_iter_headers(monkeypatch):
    monkeypatch.setattr(
        Transaction,
        "to_traceparent",
        mock.Mock(return_value="12312012123120121231201212312012-0415201309082013-0"),
    )

    transaction = Transaction(
        name="/interactions/other-dogs/new-dog",
        op="greeting.sniff",
    )

    headers = dict(transaction.iter_headers())
    assert (
        headers["sentry-trace"] == "12312012123120121231201212312012-0415201309082013-0"
    )
sentry-python-2.18.0/tests/tracing/test_integration_tests.py000066400000000000000000000226501471214654000244250ustar00rootroot00000000000000import weakref
import gc
import re
import pytest
import random

import sentry_sdk
from sentry_sdk import (
    capture_message,
    start_span,
    start_transaction,
)
from sentry_sdk.consts import SPANSTATUS
from sentry_sdk.transport import Transport
from sentry_sdk.tracing import Transaction


@pytest.mark.parametrize("sample_rate", [0.0, 1.0])
def test_basic(sentry_init, capture_events, sample_rate):
    sentry_init(traces_sample_rate=sample_rate)
    events = capture_events()

    with start_transaction(name="hi") as transaction:
        transaction.set_status(SPANSTATUS.OK)
        with pytest.raises(ZeroDivisionError):
            with start_span(op="foo", name="foodesc"):
                1 / 0

        with start_span(op="bar", name="bardesc"):
            pass

    if sample_rate:
        assert len(events) == 1
        event = events[0]

        assert event["transaction"] == "hi"
        assert event["transaction_info"]["source"] == "custom"

        span1, span2 = event["spans"]
        parent_span = event
        assert span1["tags"]["status"] == "internal_error"
        assert span1["op"] == "foo"
        assert span1["description"] == "foodesc"
        assert "status" not in span2.get("tags", {})
        assert span2["op"] == "bar"
        assert span2["description"] == "bardesc"
        assert parent_span["transaction"] == "hi"
        assert "status" not in event["tags"]
        assert event["contexts"]["trace"]["status"] == "ok"
    else:
        assert not events


@pytest.mark.parametrize("sampled", [True, False, None])
@pytest.mark.parametrize("sample_rate", [0.0, 1.0])
def test_continue_from_headers(sentry_init, capture_envelopes, sampled, sample_rate):
    """
    Ensure data is actually passed along via headers, and that they are read
    correctly.
    """
    sentry_init(traces_sample_rate=sample_rate)
    envelopes = capture_envelopes()

    # make a parent transaction (normally this would be in a different service)
    with start_transaction(name="hi", sampled=True if sample_rate == 0 else None):
        with start_span() as old_span:
            old_span.sampled = sampled
            headers = dict(
                sentry_sdk.get_current_scope().iter_trace_propagation_headers(old_span)
            )
            headers["baggage"] = (
                "other-vendor-value-1=foo;bar;baz, "
                "sentry-trace_id=771a43a4192642f0b136d5159a501700, "
                "sentry-public_key=49d0f7386ad645858ae85020e393bef3, "
                "sentry-sample_rate=0.01337, sentry-user_id=Amelie, "
                "other-vendor-value-2=foo;bar;"
            )

    # child transaction, to prove that we can read 'sentry-trace' header data correctly
    child_transaction = Transaction.continue_from_headers(headers, name="WRONG")
    assert child_transaction is not None
    assert child_transaction.parent_sampled == sampled
    assert child_transaction.trace_id == old_span.trace_id
    assert child_transaction.same_process_as_parent is False
    assert child_transaction.parent_span_id == old_span.span_id
    assert child_transaction.span_id != old_span.span_id

    baggage = child_transaction._baggage
    assert baggage
    assert not baggage.mutable
    assert baggage.sentry_items == {
        "public_key": "49d0f7386ad645858ae85020e393bef3",
        "trace_id": "771a43a4192642f0b136d5159a501700",
        "user_id": "Amelie",
        "sample_rate": "0.01337",
    }

    # add child transaction to the scope, to show that the captured message will
    # be tagged with the trace id (since it happens while the transaction is
    # open)
    with start_transaction(child_transaction):
        # change the transaction name from "WRONG" to make sure the change
        # is reflected in the final data
        sentry_sdk.get_current_scope().transaction = "ho"
        capture_message("hello")

    # in this case the child transaction won't be captured
    if sampled is False or (sample_rate == 0 and sampled is None):
        trace1, message = envelopes
        message_payload = message.get_event()
        trace1_payload = trace1.get_transaction_event()

        assert trace1_payload["transaction"] == "hi"
    else:
        trace1, message, trace2 = envelopes
        trace1_payload = trace1.get_transaction_event()
        message_payload = message.get_event()
        trace2_payload = trace2.get_transaction_event()

        assert trace1_payload["transaction"] == "hi"
        assert trace2_payload["transaction"] == "ho"

        assert (
            trace1_payload["contexts"]["trace"]["trace_id"]
            == trace2_payload["contexts"]["trace"]["trace_id"]
            == child_transaction.trace_id
            == message_payload["contexts"]["trace"]["trace_id"]
        )

        assert trace2.headers["trace"] == baggage.dynamic_sampling_context()
        assert trace2.headers["trace"] == {
            "public_key": "49d0f7386ad645858ae85020e393bef3",
            "trace_id": "771a43a4192642f0b136d5159a501700",
            "user_id": "Amelie",
            "sample_rate": "0.01337",
        }

    assert message_payload["message"] == "hello"


@pytest.mark.parametrize("sample_rate", [0.5, 1.0])
def test_dynamic_sampling_head_sdk_creates_dsc(
    sentry_init, capture_envelopes, sample_rate, monkeypatch
):
    sentry_init(traces_sample_rate=sample_rate, release="foo")
    envelopes = capture_envelopes()

    # make sure transaction is sampled for both cases
    monkeypatch.setattr(random, "random", lambda: 0.1)

    transaction = Transaction.continue_from_headers({}, name="Head SDK tx")

    # will create empty mutable baggage
    baggage = transaction._baggage
    assert baggage
    assert baggage.mutable
    assert baggage.sentry_items == {}
    assert baggage.third_party_items == ""

    with start_transaction(transaction):
        with start_span(op="foo", name="foodesc"):
            pass

    # finish will create a new baggage entry
    baggage = transaction._baggage
    trace_id = transaction.trace_id

    assert baggage
    assert not baggage.mutable
    assert baggage.third_party_items == ""
    assert baggage.sentry_items == {
        "environment": "production",
        "release": "foo",
        "sample_rate": str(sample_rate),
        "sampled": "true" if transaction.sampled else "false",
        "transaction": "Head SDK tx",
        "trace_id": trace_id,
    }

    expected_baggage = (
        "sentry-trace_id=%s,"
        "sentry-environment=production,"
        "sentry-release=foo,"
        "sentry-transaction=Head%%20SDK%%20tx,"
        "sentry-sample_rate=%s,"
        "sentry-sampled=%s"
        % (trace_id, sample_rate, "true" if transaction.sampled else "false")
    )
    assert baggage.serialize() == expected_baggage

    (envelope,) = envelopes
    assert envelope.headers["trace"] == baggage.dynamic_sampling_context()
    assert envelope.headers["trace"] == {
        "environment": "production",
        "release": "foo",
        "sample_rate": str(sample_rate),
        "sampled": "true" if transaction.sampled else "false",
        "transaction": "Head SDK tx",
        "trace_id": trace_id,
    }


@pytest.mark.parametrize(
    "args,expected_refcount",
    [({"traces_sample_rate": 1.0}, 100), ({"traces_sample_rate": 0.0}, 0)],
)
def test_memory_usage(sentry_init, capture_events, args, expected_refcount):
    sentry_init(**args)

    references = weakref.WeakSet()

    with start_transaction(name="hi"):
        for i in range(100):
            with start_span(op="helloworld", name="hi {}".format(i)) as span:

                def foo():
                    pass

                references.add(foo)
                span.set_tag("foo", foo)
                pass

        del foo
        del span

        # required only for pypy (cpython frees immediately)
        gc.collect()

        assert len(references) == expected_refcount
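
# Why a WeakSet works as the leak detector above: entries vanish once the
# only remaining references are the spans' tag values, so the surviving
# count shows whether the transaction kept its spans (and their data) alive.
def _demo_weakset_drops_dead_references():
    class Dummy(object):
        pass

    kept = Dummy()
    tracked = weakref.WeakSet([kept, Dummy()])  # the second object dies here
    gc.collect()  # only needed on pypy; cpython frees eagerly
    assert len(tracked) == 1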


def test_transactions_do_not_go_through_before_send(sentry_init, capture_events):
    def before_send(event, hint):
        raise RuntimeError("should not be called")

    sentry_init(traces_sample_rate=1.0, before_send=before_send)
    events = capture_events()

    with start_transaction(name="/"):
        pass

    assert len(events) == 1


def test_start_span_after_finish(sentry_init, capture_events):
    class CustomTransport(Transport):
        def capture_envelope(self, envelope):
            pass

        def capture_event(self, event):
            start_span(op="toolate", name="justdont")
            pass

    sentry_init(traces_sample_rate=1, transport=CustomTransport())
    events = capture_events()

    with start_transaction(name="hi"):
        with start_span(op="bar", name="bardesc"):
            pass

    assert len(events) == 1


def test_trace_propagation_meta_head_sdk(sentry_init):
    sentry_init(traces_sample_rate=1.0, release="foo")

    transaction = Transaction.continue_from_headers({}, name="Head SDK tx")
    meta = None
    span = None

    with start_transaction(transaction):
        with start_span(op="foo", name="foodesc") as current_span:
            span = current_span
            meta = sentry_sdk.get_current_scope().trace_propagation_meta()

    ind = meta.find(">") + 1
    sentry_trace, baggage = meta[:ind], meta[ind:]

    assert 'meta name="sentry-trace"' in sentry_trace
    sentry_trace_content = re.findall('content="([^"]*)"', sentry_trace)[0]
    assert sentry_trace_content == span.to_traceparent()

    assert 'meta name="baggage"' in baggage
    baggage_content = re.findall('content="([^"]*)"', baggage)[0]
    assert baggage_content == transaction.get_baggage().serialize()
sentry-python-2.18.0/tests/tracing/test_misc.py000066400000000000000000000353541471214654000216200ustar00rootroot00000000000000import pytest
import gc
import uuid
import os
from unittest import mock
from unittest.mock import MagicMock

import sentry_sdk
from sentry_sdk import start_span, start_transaction, set_measurement
from sentry_sdk.consts import MATCH_ALL
from sentry_sdk.tracing import Span, Transaction
from sentry_sdk.tracing_utils import should_propagate_trace
from sentry_sdk.utils import Dsn


def test_span_trimming(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0, _experiments={"max_spans": 3})
    events = capture_events()

    with start_transaction(name="hi"):
        for i in range(10):
            with start_span(op="foo{}".format(i)):
                pass

    (event,) = events

    assert len(event["spans"]) == 3

    span1, span2, span3 = event["spans"]
    assert span1["op"] == "foo0"
    assert span2["op"] == "foo1"
    assert span3["op"] == "foo2"


def test_transaction_naming(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0)
    events = capture_events()

    # default name in event if no name is passed
    with start_transaction() as transaction:
        pass
    assert len(events) == 1
    assert events[0]["transaction"] == ""

    # the name can be set once the transaction's already started
    with start_transaction() as transaction:
        transaction.name = "name-known-after-transaction-started"
    assert len(events) == 2
    assert events[1]["transaction"] == "name-known-after-transaction-started"

    # passing in a name works, too
    with start_transaction(name="a"):
        pass
    assert len(events) == 3
    assert events[2]["transaction"] == "a"


def test_transaction_data(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0)
    events = capture_events()

    with start_transaction(name="test-transaction"):
        span_or_tx = sentry_sdk.get_current_span()
        span_or_tx.set_data("foo", "bar")
        with start_span(op="test-span") as span:
            span.set_data("spanfoo", "spanbar")

    assert len(events) == 1

    transaction = events[0]
    transaction_data = transaction["contexts"]["trace"]["data"]

    assert "data" not in transaction.keys()
    assert transaction_data.items() >= {"foo": "bar"}.items()

    assert len(transaction["spans"]) == 1

    span = transaction["spans"][0]
    span_data = span["data"]

    assert "contexts" not in span.keys()
    assert span_data.items() >= {"spanfoo": "spanbar"}.items()


def test_start_transaction(sentry_init):
    sentry_init(traces_sample_rate=1.0)

    # you can have it start a transaction for you
    result1 = start_transaction(
        name="/interactions/other-dogs/new-dog", op="greeting.sniff"
    )
    assert isinstance(result1, Transaction)
    assert result1.name == "/interactions/other-dogs/new-dog"
    assert result1.op == "greeting.sniff"

    # or you can pass it an already-created transaction
    preexisting_transaction = Transaction(
        name="/interactions/other-dogs/new-dog", op="greeting.sniff"
    )
    result2 = start_transaction(preexisting_transaction)
    assert result2 is preexisting_transaction


def test_finds_transaction_on_scope(sentry_init):
    sentry_init(traces_sample_rate=1.0)

    transaction = start_transaction(name="dogpark")

    scope = sentry_sdk.get_current_scope()

    # See note in Scope class re: getters and setters of the `transaction`
    # property. For the moment, assigning to scope.transaction merely sets the
    # transaction name, rather than putting the transaction on the scope, so we
    # have to assign to _span directly.
    scope._span = transaction

    # Reading scope.transaction, however, does what you'd expect, and returns the
    # transaction on the scope.
    assert scope.transaction is not None
    assert isinstance(scope.transaction, Transaction)
    assert scope.transaction.name == "dogpark"

    # If the transaction is also set as the span on the scope, it can be found
    # by accessing _span, too.
    assert scope._span is not None
    assert isinstance(scope._span, Transaction)
    assert scope._span.name == "dogpark"


def test_finds_transaction_when_descendent_span_is_on_scope(
    sentry_init,
):
    sentry_init(traces_sample_rate=1.0)

    transaction = start_transaction(name="dogpark")
    child_span = transaction.start_child(op="sniffing")

    scope = sentry_sdk.get_current_scope()
    scope._span = child_span

    # this is the same whether it's the transaction itself or one of its
    # descendants directly attached to the scope
    assert scope.transaction is not None
    assert isinstance(scope.transaction, Transaction)
    assert scope.transaction.name == "dogpark"

    # here we see that it is in fact the span on the scope, rather than the
    # transaction itself
    assert scope._span is not None
    assert isinstance(scope._span, Span)
    assert scope._span.op == "sniffing"


def test_finds_orphan_span_on_scope(sentry_init):
    # this is deprecated behavior which may be removed at some point (along with
    # the start_span function)
    sentry_init(traces_sample_rate=1.0)

    span = start_span(op="sniffing")

    scope = sentry_sdk.get_current_scope()
    scope._span = span

    assert scope._span is not None
    assert isinstance(scope._span, Span)
    assert scope._span.op == "sniffing"


def test_finds_non_orphan_span_on_scope(sentry_init):
    sentry_init(traces_sample_rate=1.0)

    transaction = start_transaction(name="dogpark")
    child_span = transaction.start_child(op="sniffing")

    scope = sentry_sdk.get_current_scope()
    scope._span = child_span

    assert scope._span is not None
    assert isinstance(scope._span, Span)
    assert scope._span.op == "sniffing"


def test_circular_references(monkeypatch, sentry_init, request):
    # TODO: We discovered while writing this test about transaction/span
    # reference cycles that there's actually also a circular reference in
    # `serializer.py`, between the functions `_serialize_node` and
    # `_serialize_node_impl`, both of which are defined inside of the main
    # `serialize` function, and each of which calls the other one. For now, in
    # order to avoid having those ref cycles give us a false positive here, we
    # can mock out `serialize`. In the long run, though, we should probably fix
    # that. (Whenever we do work on fixing it, it may be useful to add
    #
    #     gc.set_debug(gc.DEBUG_LEAK)
    #     request.addfinalizer(lambda: gc.set_debug(~gc.DEBUG_LEAK))
    #
    # immediately after the initial collection below, so we can see what new
    # objects the garbage collector has to clean up once `transaction.finish` is
    # called and the serializer runs.)
    monkeypatch.setattr(
        sentry_sdk.client,
        "serialize",
        mock.Mock(
            return_value=None,
        ),
    )

    # In certain versions of python, in some environments (specifically, python
    # 3.4 when run in GH Actions), we run into a `ctypes` bug which creates
    # circular references when `uuid4()` is called, as happens when we're
    # generating event ids. Mocking it with an implementation which doesn't use
    # the `ctypes` function lets us avoid having false positives when garbage
    # collecting. See https://bugs.python.org/issue20519.
    monkeypatch.setattr(
        uuid,
        "uuid4",
        mock.Mock(
            return_value=uuid.UUID(bytes=os.urandom(16)),
        ),
    )

    gc.disable()
    request.addfinalizer(gc.enable)

    sentry_init(traces_sample_rate=1.0)

    # Make sure that we're starting with a clean slate before we start creating
    # transaction/span reference cycles
    gc.collect()

    dogpark_transaction = start_transaction(name="dogpark")
    sniffing_span = dogpark_transaction.start_child(op="sniffing")
    wagging_span = dogpark_transaction.start_child(op="wagging")

    # At some point, you have to stop sniffing - there are balls to chase! - so finish
    # this span while the dogpark transaction is still open
    sniffing_span.finish()

    # The wagging, however, continues long past the dogpark, so that span will
    # NOT finish before the transaction ends. (Doing it in this order proves
    # that both finished and unfinished spans get their cycles broken.)
    dogpark_transaction.finish()

    # Eventually you gotta sleep...
    wagging_span.finish()

    # assuming there are no cycles by this point, these should all be able to go
    # out of scope and get their memory deallocated without the garbage
    # collector having anything to do
    del sniffing_span
    del wagging_span
    del dogpark_transaction

    assert gc.collect() == 0
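
# The `gc.collect() == 0` assertion above works because collect() returns the
# number of unreachable objects it found; any surviving reference cycle shows
# up as a non-zero count. A deliberately cyclic object demonstrates this:
def _demo_gc_collect_counts_cycles():
    gc.collect()  # start from a clean slate
    node = {}
    node["self"] = node  # create a reference cycle
    del node
    assert gc.collect() > 0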


def test_set_measurement(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0)

    events = capture_events()

    transaction = start_transaction(name="measuring stuff")

    with pytest.raises(TypeError):
        transaction.set_measurement()

    with pytest.raises(TypeError):
        transaction.set_measurement("metric.foo")

    transaction.set_measurement("metric.foo", 123)
    transaction.set_measurement("metric.bar", 456, unit="second")
    transaction.set_measurement("metric.baz", 420.69, unit="custom")
    transaction.set_measurement("metric.foobar", 12, unit="percent")
    transaction.set_measurement("metric.foobar", 17.99, unit="percent")

    transaction.finish()

    (event,) = events
    assert event["measurements"]["metric.foo"] == {"value": 123, "unit": ""}
    assert event["measurements"]["metric.bar"] == {"value": 456, "unit": "second"}
    assert event["measurements"]["metric.baz"] == {"value": 420.69, "unit": "custom"}
    assert event["measurements"]["metric.foobar"] == {"value": 17.99, "unit": "percent"}


def test_set_measurement_public_api(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0)

    events = capture_events()

    with start_transaction(name="measuring stuff"):
        set_measurement("metric.foo", 123)
        set_measurement("metric.bar", 456, unit="second")

    (event,) = events
    assert event["measurements"]["metric.foo"] == {"value": 123, "unit": ""}
    assert event["measurements"]["metric.bar"] == {"value": 456, "unit": "second"}


@pytest.mark.parametrize(
    "trace_propagation_targets,url,expected_propagation_decision",
    [
        (None, "http://example.com", False),
        ([], "http://example.com", False),
        ([MATCH_ALL], "http://example.com", True),
        (["localhost"], "localhost:8443/api/users", True),
        (["localhost"], "http://localhost:8443/api/users", True),
        (["localhost"], "mylocalhost:8080/api/users", True),
        ([r"^/api"], "/api/envelopes", True),
        ([r"^/api"], "/backend/api/envelopes", False),
        ([r"myApi.com/v[2-4]"], "myApi.com/v2/projects", True),
        ([r"myApi.com/v[2-4]"], "myApi.com/v1/projects", False),
        ([r"https:\/\/.*"], "https://example.com", True),
        (
            [r"https://.*"],
            "https://example.com",
            True,
        ),  # to show escaping is not needed
        ([r"https://.*"], "http://example.com/insecure/", False),
    ],
)
def test_should_propagate_trace(
    trace_propagation_targets, url, expected_propagation_decision
):
    client = MagicMock()

    # This test assumes the URLs are not Sentry URLs. Use test_should_propagate_trace_to_sentry for Sentry URLs.
    client.is_sentry_url = lambda _: False

    client.options = {"trace_propagation_targets": trace_propagation_targets}
    client.transport = MagicMock()
    client.transport.parsed_dsn = Dsn("https://bla@xxx.sentry.io/12312012")

    assert should_propagate_trace(client, url) == expected_propagation_decision
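
# A rough sketch of the matching rule the cases above encode: each entry in
# `trace_propagation_targets` is treated as a regular expression and searched
# (not fully matched) against the outgoing URL. Illustrative only, not the
# SDK's implementation.
def _demo_url_matches_targets(targets, url):
    import re

    return any(re.search(pattern, url) for pattern in targets or [])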


@pytest.mark.parametrize(
    "dsn,url,expected_propagation_decision",
    [
        (
            "https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
            "http://example.com",
            True,
        ),
        (
            "https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
            "https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
            False,
        ),
        (
            "https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
            "http://squirrelchasers.ingest.sentry.io/12312012",
            False,
        ),
        (
            "https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
            "http://ingest.sentry.io/12312012",
            True,
        ),
        (
            "https://abc@localsentry.example.com/12312012",
            "http://localsentry.example.com",
            False,
        ),
    ],
)
def test_should_propagate_trace_to_sentry(
    sentry_init, dsn, url, expected_propagation_decision
):
    sentry_init(
        dsn=dsn,
        traces_sample_rate=1.0,
    )

    client = sentry_sdk.get_client()
    client.transport.parsed_dsn = Dsn(dsn)

    assert should_propagate_trace(client, url) == expected_propagation_decision


def test_start_transaction_updates_scope_name_source(sentry_init):
    sentry_init(traces_sample_rate=1.0)

    scope = sentry_sdk.get_current_scope()

    with start_transaction(name="foobar", source="route"):
        assert scope._transaction == "foobar"
        assert scope._transaction_info == {"source": "route"}


@pytest.mark.parametrize("sampled", (True, None))
def test_transaction_dropped_debug_not_started(sentry_init, sampled):
    sentry_init(enable_tracing=True)

    tx = Transaction(sampled=sampled)

    with mock.patch("sentry_sdk.tracing.logger") as mock_logger:
        with tx:
            pass

    mock_logger.debug.assert_any_call(
        "Discarding transaction because it was not started with sentry_sdk.start_transaction"
    )

    with pytest.raises(AssertionError):
        # We should NOT see the "sampled = False" message here
        mock_logger.debug.assert_any_call(
            "Discarding transaction because sampled = False"
        )


def test_transaction_dropped_sampled_false(sentry_init):
    sentry_init(enable_tracing=True)

    tx = Transaction(sampled=False)

    with mock.patch("sentry_sdk.tracing.logger") as mock_logger:
        with sentry_sdk.start_transaction(tx):
            pass

    mock_logger.debug.assert_any_call("Discarding transaction because sampled = False")

    with pytest.raises(AssertionError):
        # We should not see the "not started" message here
        mock_logger.debug.assert_any_call(
            "Discarding transaction because it was not started with sentry_sdk.start_transaction"
        )


def test_transaction_not_started_warning(sentry_init):
    sentry_init(enable_tracing=True)

    tx = Transaction()

    with mock.patch("sentry_sdk.tracing.logger") as mock_logger:
        with tx:
            pass

    mock_logger.debug.assert_any_call(
        "Transaction was entered without being started with sentry_sdk.start_transaction."
        "The transaction will not be sent to Sentry. To fix, start the transaction by"
        "passing it to sentry_sdk.start_transaction."
    )
sentry-python-2.18.0/tests/tracing/test_noop_span.py000066400000000000000000000033211471214654000226460ustar00rootroot00000000000000import sentry_sdk
from sentry_sdk.tracing import NoOpSpan

# These tests make sure that the examples from the documentation [1]
# are working when OTel (OpenTelemetry) instrumentation is turned on,
# and therefore, the Sentry tracing should not do anything.
#
# 1: https://docs.sentry.io/platforms/python/performance/instrumentation/custom-instrumentation/


def test_noop_start_transaction(sentry_init):
    sentry_init(instrumenter="otel")

    with sentry_sdk.start_transaction(
        op="task", name="test_transaction_name"
    ) as transaction:
        assert isinstance(transaction, NoOpSpan)
        assert sentry_sdk.get_current_scope().span is transaction

        transaction.name = "new name"


def test_noop_start_span(sentry_init):
    sentry_init(instrumenter="otel")

    with sentry_sdk.start_span(op="http", name="GET /") as span:
        assert isinstance(span, NoOpSpan)
        assert sentry_sdk.get_current_scope().span is span

        span.set_tag("http.response.status_code", 418)
        span.set_data("http.entity_type", "teapot")


def test_noop_transaction_start_child(sentry_init):
    sentry_init(instrumenter="otel")

    transaction = sentry_sdk.start_transaction(name="task")
    assert isinstance(transaction, NoOpSpan)

    with transaction.start_child(op="child_task") as child:
        assert isinstance(child, NoOpSpan)
        assert sentry_sdk.get_current_scope().span is child


def test_noop_span_start_child(sentry_init):
    sentry_init(instrumenter="otel")
    span = sentry_sdk.start_span(name="task")
    assert isinstance(span, NoOpSpan)

    with span.start_child(op="child_task") as child:
        assert isinstance(child, NoOpSpan)
        assert sentry_sdk.get_current_scope().span is child
sentry-python-2.18.0/tests/tracing/test_propagation.py000066400000000000000000000024031471214654000231750ustar00rootroot00000000000000import sentry_sdk
import pytest


def test_standalone_span_iter_headers(sentry_init):
    sentry_init(enable_tracing=True)

    with sentry_sdk.start_span(op="test") as span:
        with pytest.raises(StopIteration):
            # We should not have any propagation headers
            next(span.iter_headers())


def test_span_in_span_iter_headers(sentry_init):
    sentry_init(enable_tracing=True)

    with sentry_sdk.start_span(op="test"):
        with sentry_sdk.start_span(op="test2") as span_inner:
            with pytest.raises(StopIteration):
                # We should not have any propagation headers
                next(span_inner.iter_headers())


def test_span_in_transaction(sentry_init):
    sentry_init(enable_tracing=True)

    with sentry_sdk.start_transaction(op="test"):
        with sentry_sdk.start_span(op="test2") as span:
            # Ensure the headers are there
            next(span.iter_headers())


def test_span_in_span_in_transaction(sentry_init):
    sentry_init(enable_tracing=True)

    with sentry_sdk.start_transaction(op="test"):
        with sentry_sdk.start_span(op="test2"):
            with sentry_sdk.start_span(op="test3") as span_inner:
                # Ensure the headers are there
                next(span_inner.iter_headers())
sentry-python-2.18.0/tests/tracing/test_sampling.py000066400000000000000000000250631471214654000224730ustar00rootroot00000000000000import random
from collections import Counter
from unittest import mock

import pytest

import sentry_sdk
from sentry_sdk import start_span, start_transaction, capture_exception
from sentry_sdk.tracing import Transaction
from sentry_sdk.utils import logger


def test_sampling_decided_only_for_transactions(sentry_init, capture_events):
    sentry_init(traces_sample_rate=0.5)

    with start_transaction(name="hi") as transaction:
        assert transaction.sampled is not None

        with start_span() as span:
            assert span.sampled == transaction.sampled

    with start_span() as span:
        assert span.sampled is None


@pytest.mark.parametrize("sampled", [True, False])
def test_nested_transaction_sampling_override(sentry_init, sampled):
    sentry_init(traces_sample_rate=1.0)

    with start_transaction(name="outer", sampled=sampled) as outer_transaction:
        assert outer_transaction.sampled is sampled
        with start_transaction(
            name="inner", sampled=(not sampled)
        ) as inner_transaction:
            assert inner_transaction.sampled is not sampled
        assert outer_transaction.sampled is sampled


def test_no_double_sampling(sentry_init, capture_events):
    # Transactions should not be subject to the global/error sample rate.
    # Only the traces_sample_rate should apply.
    sentry_init(traces_sample_rate=1.0, sample_rate=0.0)
    events = capture_events()

    with start_transaction(name="/"):
        pass

    assert len(events) == 1


@pytest.mark.parametrize("sampling_decision", [True, False])
def test_get_transaction_and_span_from_scope_regardless_of_sampling_decision(
    sentry_init, sampling_decision
):
    sentry_init(traces_sample_rate=1.0)

    with start_transaction(name="/", sampled=sampling_decision):
        with start_span(op="child-span"):
            with start_span(op="child-child-span"):
                scope = sentry_sdk.get_current_scope()
                assert scope.span.op == "child-child-span"
                assert scope.transaction.name == "/"


@pytest.mark.parametrize(
    "traces_sample_rate,expected_decision",
    [(0.0, False), (0.25, False), (0.75, True), (1.00, True)],
)
def test_uses_traces_sample_rate_correctly(
    sentry_init,
    traces_sample_rate,
    expected_decision,
):
    sentry_init(traces_sample_rate=traces_sample_rate)

    with mock.patch.object(random, "random", return_value=0.5):
        transaction = start_transaction(name="dogpark")
        assert transaction.sampled is expected_decision
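
# With random.random() pinned to 0.5, the cases above reduce to the
# conventional rate check sketched below (an illustration of the rule, not a
# quote of the SDK's internals): sample when the roll falls below the rate.
def _demo_rate_based_decision(traces_sample_rate, roll=0.5):
    return roll < traces_sample_rate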


@pytest.mark.parametrize(
    "traces_sampler_return_value,expected_decision",
    [(0.0, False), (0.25, False), (0.75, True), (1.00, True)],
)
def test_uses_traces_sampler_return_value_correctly(
    sentry_init,
    traces_sampler_return_value,
    expected_decision,
):
    sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value))

    with mock.patch.object(random, "random", return_value=0.5):
        transaction = start_transaction(name="dogpark")
        assert transaction.sampled is expected_decision


@pytest.mark.parametrize("traces_sampler_return_value", [True, False])
def test_tolerates_traces_sampler_returning_a_boolean(
    sentry_init, traces_sampler_return_value
):
    sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value))

    transaction = start_transaction(name="dogpark")
    assert transaction.sampled is traces_sampler_return_value


@pytest.mark.parametrize("sampling_decision", [True, False])
def test_only_captures_transaction_when_sampled_is_true(
    sentry_init, sampling_decision, capture_events
):
    sentry_init(traces_sampler=mock.Mock(return_value=sampling_decision))
    events = capture_events()

    transaction = start_transaction(name="dogpark")
    transaction.finish()

    assert len(events) == (1 if sampling_decision else 0)


@pytest.mark.parametrize(
    "traces_sample_rate,traces_sampler_return_value", [(0, True), (1, False)]
)
def test_prefers_traces_sampler_to_traces_sample_rate(
    sentry_init,
    traces_sample_rate,
    traces_sampler_return_value,
):
    # make traces_sample_rate imply the opposite of traces_sampler, to prove
    # that traces_sampler takes precedence
    traces_sampler = mock.Mock(return_value=traces_sampler_return_value)
    sentry_init(
        traces_sample_rate=traces_sample_rate,
        traces_sampler=traces_sampler,
    )

    transaction = start_transaction(name="dogpark")
    assert traces_sampler.called is True
    assert transaction.sampled is traces_sampler_return_value


@pytest.mark.parametrize("parent_sampling_decision", [True, False])
def test_ignores_inherited_sample_decision_when_traces_sampler_defined(
    sentry_init, parent_sampling_decision
):
    # make traces_sampler pick the opposite of the inherited decision, to prove
    # that traces_sampler takes precedence
    traces_sampler = mock.Mock(return_value=not parent_sampling_decision)
    sentry_init(traces_sampler=traces_sampler)

    transaction = start_transaction(
        name="dogpark", parent_sampled=parent_sampling_decision
    )
    assert transaction.sampled is not parent_sampling_decision


@pytest.mark.parametrize("explicit_decision", [True, False])
def test_traces_sampler_doesnt_overwrite_explicitly_passed_sampling_decision(
    sentry_init, explicit_decision
):
    # make traces_sampler pick the opposite of the explicit decision, to prove
    # that the explicit decision takes precedence
    traces_sampler = mock.Mock(return_value=not explicit_decision)
    sentry_init(traces_sampler=traces_sampler)

    transaction = start_transaction(name="dogpark", sampled=explicit_decision)
    assert transaction.sampled is explicit_decision


@pytest.mark.parametrize("parent_sampling_decision", [True, False])
def test_inherits_parent_sampling_decision_when_traces_sampler_undefined(
    sentry_init, parent_sampling_decision
):
    # make sure the parent sampling decision is the opposite of what
    # traces_sample_rate would produce, to prove the inheritance takes
    # precedence
    sentry_init(traces_sample_rate=0.5)
    mock_random_value = 0.25 if parent_sampling_decision is False else 0.75

    with mock.patch.object(random, "random", return_value=mock_random_value):
        transaction = start_transaction(
            name="dogpark", parent_sampled=parent_sampling_decision
        )
        assert transaction.sampled is parent_sampling_decision


@pytest.mark.parametrize("parent_sampling_decision", [True, False])
def test_passes_parent_sampling_decision_in_sampling_context(
    sentry_init, parent_sampling_decision
):
    sentry_init(traces_sample_rate=1.0)

    sentry_trace_header = (
        "12312012123120121231201212312012-1121201211212012-{sampled}".format(
            sampled=int(parent_sampling_decision)
        )
    )

    transaction = Transaction.continue_from_headers(
        headers={"sentry-trace": sentry_trace_header}, name="dogpark"
    )
    spy = mock.Mock(wraps=transaction)
    start_transaction(transaction=spy)

    # there's only one call (so index at 0) and kwargs are always last in a call
    # tuple (so index at -1)
    sampling_context = spy._set_initial_sampling_decision.mock_calls[0][-1][
        "sampling_context"
    ]
    assert "parent_sampled" in sampling_context
    # because we passed in a spy, attribute access requires unwrapping
    assert sampling_context["parent_sampled"]._mock_wraps is parent_sampling_decision


def test_passes_custom_sampling_context_from_start_transaction_to_traces_sampler(
    sentry_init, DictionaryContaining  # noqa: N803
):
    traces_sampler = mock.Mock()
    sentry_init(traces_sampler=traces_sampler)

    start_transaction(custom_sampling_context={"dogs": "yes", "cats": "maybe"})

    traces_sampler.assert_any_call(
        DictionaryContaining({"dogs": "yes", "cats": "maybe"})
    )


def test_sample_rate_affects_errors(sentry_init, capture_events):
    sentry_init(sample_rate=0)
    events = capture_events()

    try:
        1 / 0
    except Exception:
        capture_exception()

    assert len(events) == 0


@pytest.mark.parametrize(
    "traces_sampler_return_value",
    [
        "dogs are great",  # wrong type
        (0, 1),  # wrong type
        {"Maisey": "Charllie"},  # wrong type
        [True, True],  # wrong type
        {0.2012},  # wrong type
        float("NaN"),  # wrong type
        None,  # wrong type
        -1.121,  # wrong value
        1.231,  # wrong value
    ],
)
def test_warns_and_sets_sampled_to_false_on_invalid_traces_sampler_return_value(
    sentry_init, traces_sampler_return_value, StringContaining  # noqa: N803
):
    sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value))

    with mock.patch.object(logger, "warning", mock.Mock()):
        transaction = start_transaction(name="dogpark")
        logger.warning.assert_any_call(StringContaining("Given sample rate is invalid"))
        assert transaction.sampled is False


@pytest.mark.parametrize(
    "traces_sample_rate,sampled_output,expected_record_lost_event_calls",
    [
        (None, False, []),
        (
            0.0,
            False,
            [("sample_rate", "transaction", None, 1), ("sample_rate", "span", None, 1)],
        ),
        (1.0, True, []),
    ],
)
def test_records_lost_event_only_if_traces_sample_rate_enabled(
    sentry_init,
    capture_record_lost_event_calls,
    traces_sample_rate,
    sampled_output,
    expected_record_lost_event_calls,
):
    sentry_init(traces_sample_rate=traces_sample_rate)
    record_lost_event_calls = capture_record_lost_event_calls()

    transaction = start_transaction(name="dogpark")
    assert transaction.sampled is sampled_output
    transaction.finish()

    # Use Counter because order of calls does not matter
    assert Counter(record_lost_event_calls) == Counter(expected_record_lost_event_calls)


@pytest.mark.parametrize(
    "traces_sampler,sampled_output,expected_record_lost_event_calls",
    [
        (None, False, []),
        (
            lambda _x: 0.0,
            False,
            [("sample_rate", "transaction", None, 1), ("sample_rate", "span", None, 1)],
        ),
        (lambda _x: 1.0, True, []),
    ],
)
def test_records_lost_event_only_if_traces_sampler_enabled(
    sentry_init,
    capture_record_lost_event_calls,
    traces_sampler,
    sampled_output,
    expected_record_lost_event_calls,
):
    sentry_init(traces_sampler=traces_sampler)
    record_lost_event_calls = capture_record_lost_event_calls()

    transaction = start_transaction(name="dogpark")
    assert transaction.sampled is sampled_output
    transaction.finish()

    # Use Counter because order of calls does not matter
    assert Counter(record_lost_event_calls) == Counter(expected_record_lost_event_calls)
sentry-python-2.18.0/tests/tracing/test_span_name.py000066400000000000000000000032301471214654000226120ustar00rootroot00000000000000import pytest

import sentry_sdk


def test_start_span_description(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0)
    events = capture_events()

    with sentry_sdk.start_transaction(name="hi"):
        with pytest.deprecated_call():
            with sentry_sdk.start_span(op="foo", description="span-desc"):
                ...

    (event,) = events

    assert event["spans"][0]["description"] == "span-desc"


def test_start_span_name(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0)
    events = capture_events()

    with sentry_sdk.start_transaction(name="hi"):
        with sentry_sdk.start_span(op="foo", name="span-name"):
            ...

    (event,) = events

    assert event["spans"][0]["description"] == "span-name"


def test_start_child_description(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0)
    events = capture_events()

    with sentry_sdk.start_transaction(name="hi"):
        with pytest.deprecated_call():
            with sentry_sdk.start_span(op="foo", description="span-desc") as span:
                with span.start_child(op="bar", description="child-desc"):
                    ...

    (event,) = events

    assert event["spans"][-1]["description"] == "child-desc"


def test_start_child_name(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0)
    events = capture_events()

    with sentry_sdk.start_transaction(name="hi"):
        with sentry_sdk.start_span(op="foo", name="span-name") as span:
            with span.start_child(op="bar", name="child-name"):
                ...

    (event,) = events

    assert event["spans"][-1]["description"] == "child-name"
sentry-python-2.18.0/tests/tracing/test_span_origin.py000066400000000000000000000023171471214654000231660ustar00rootroot00000000000000from sentry_sdk import start_transaction, start_span


def test_span_origin_manual(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0)
    events = capture_events()

    with start_transaction(name="hi"):
        with start_span(op="foo", name="bar"):
            pass

    (event,) = events

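    # "manual" is the fallback origin when no origin is passed explicitly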
    assert len(events) == 1
    assert event["spans"][0]["origin"] == "manual"
    assert event["contexts"]["trace"]["origin"] == "manual"


def test_span_origin_custom(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0)
    events = capture_events()

    with start_transaction(name="hi"):
        with start_span(op="foo", name="bar", origin="foo.foo2.foo3"):
            pass

    with start_transaction(name="ho", origin="ho.ho2.ho3"):
        with start_span(op="baz", name="qux", origin="baz.baz2.baz3"):
            pass

    (first_transaction, second_transaction) = events

    assert len(events) == 2
    assert first_transaction["contexts"]["trace"]["origin"] == "manual"
    assert first_transaction["spans"][0]["origin"] == "foo.foo2.foo3"

    assert second_transaction["contexts"]["trace"]["origin"] == "ho.ho2.ho3"
    assert second_transaction["spans"][0]["origin"] == "baz.baz2.baz3"
sentry-python-2.18.0/tests/utils/000077500000000000000000000000001471214654000167535ustar00rootroot00000000000000sentry-python-2.18.0/tests/utils/__init__.py000066400000000000000000000000501471214654000210570ustar00rootroot00000000000000# Make this a module for test_abs_path.
sentry-python-2.18.0/tests/utils/test_contextvars.py000066400000000000000000000014321471214654000227440ustar00rootroot00000000000000import pytest
import random
import time


@pytest.mark.forked
def test_leaks(maybe_monkeypatched_threading):
    import threading

    # Need to explicitly call _get_contextvars because the SDK has already
    # decided upon gevent on import.

    from sentry_sdk import utils

    _, ContextVar = utils._get_contextvars()  # noqa: N806

    ts = []

    var = ContextVar("test_contextvar_leaks")

    success = []

    def run():
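        # Each thread stores its own random value in the contextvar and
        # repeatedly yields via sleep(0); the assertion below fails if a
        # value set by another thread leaks into this thread's context.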
        value = int(random.random() * 1000)
        var.set(value)

        for _ in range(100):
            time.sleep(0)
            assert var.get(None) == value

        success.append(1)

    for _ in range(20):
        t = threading.Thread(target=run)
        t.start()
        ts.append(t)

    for t in ts:
        t.join()

    assert len(success) == 20
sentry-python-2.18.0/tests/utils/test_general.py000066400000000000000000000402401471214654000220010ustar00rootroot00000000000000import sys
import os

import pytest


from sentry_sdk.utils import (
    BadDsn,
    Dsn,
    safe_repr,
    exceptions_from_error_tuple,
    filename_for_module,
    iter_event_stacktraces,
    to_base64,
    from_base64,
    set_in_app_in_frames,
    strip_string,
    AnnotatedValue,
)
from sentry_sdk.consts import EndpointType


try:
    from hypothesis import given
    import hypothesis.strategies as st
except ImportError:
    pass
else:
    any_string = st.one_of(st.binary(), st.text())

    @given(x=any_string)
    def test_safe_repr_never_broken_for_strings(x):
        r = safe_repr(x)
        assert isinstance(r, str)
        assert "broken repr" not in r


def test_safe_repr_regressions():
    assert "лошадь" in safe_repr("лошадь")


@pytest.mark.parametrize("prefix", ("", "abcd", "лошадь"))
@pytest.mark.parametrize("character", "\x00\x07\x1b\n")
def test_safe_repr_non_printable(prefix, character):
    """Check that non-printable characters are escaped"""
    string = prefix + character
    assert character not in safe_repr(string)
    assert character not in safe_repr(string.encode("utf-8"))


def test_abs_path():
    """Check if abs_path is actually an absolute path. This can happen either
    with eval/exec like here, or when the file in the frame is relative to
    __main__"""

    code = compile("1/0", "test.py", "exec")
    try:
        exec(code, {})
    except Exception:
        exceptions = exceptions_from_error_tuple(sys.exc_info())

    (exception,) = exceptions
    frame1, frame2 = frames = exception["stacktrace"]["frames"]

    for frame in frames:
        assert os.path.abspath(frame["abs_path"]) == frame["abs_path"]

    assert frame1["filename"] == "tests/utils/test_general.py"
    assert frame2["filename"] == "test.py"


def test_filename():
    x = filename_for_module

    assert x("bogus", "bogus") == "bogus"

    assert x("os", os.__file__) == "os.py"

    import sentry_sdk.utils

    assert x("sentry_sdk.utils", sentry_sdk.utils.__file__) == "sentry_sdk/utils.py"


@pytest.mark.parametrize(
    "given,expected_envelope",
    [
        (
            "https://foobar@sentry.io/123",
            "https://sentry.io/api/123/envelope/",
        ),
        (
            "https://foobar@sentry.io/bam/123",
            "https://sentry.io/bam/api/123/envelope/",
        ),
        (
            "https://foobar@sentry.io/bam/baz/123",
            "https://sentry.io/bam/baz/api/123/envelope/",
        ),
    ],
)
def test_parse_dsn_paths(given, expected_envelope):
    dsn = Dsn(given)
    auth = dsn.to_auth()
    assert auth.get_api_url() == expected_envelope
    assert auth.get_api_url(EndpointType.ENVELOPE) == expected_envelope


@pytest.mark.parametrize(
    "dsn",
    [
        "https://foobar@sentry.io"
        "https://foobar@sentry.io/"
        "https://foobar@sentry.io/asdf"
        "https://foobar@sentry.io/asdf/"
        "https://foobar@sentry.io/asdf/123/"
    ],
)
def test_parse_invalid_dsn(dsn):
    with pytest.raises(BadDsn):
        Dsn(dsn)


@pytest.mark.parametrize(
    "frame,in_app_include,in_app_exclude,project_root,resulting_frame",
    [
        [
            {
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
            },
            None,
            None,
            None,
            {
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
                "in_app": False,
            },
        ],
        [
            {
                "module": "fastapi.routing",
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
            },
            None,
            None,
            None,
            {
                "module": "fastapi.routing",
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
                "in_app": False,
            },
        ],
        [
            {
                "module": "fastapi.routing",
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
                "in_app": True,
            },
            None,
            None,
            None,
            {
                "module": "fastapi.routing",
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
                "in_app": True,
            },
        ],
        [
            {
                "abs_path": "C:\\Users\\winuser\\AppData\\Roaming\\Python\\Python35\\site-packages\\fastapi\\routing.py",
            },
            None,
            None,
            None,
            {
                "abs_path": "C:\\Users\\winuser\\AppData\\Roaming\\Python\\Python35\\site-packages\\fastapi\\routing.py",
                "in_app": False,
            },
        ],
        [
            {
                "module": "fastapi.routing",
                "abs_path": "/usr/lib/python2.7/dist-packages/fastapi/routing.py",
            },
            None,
            None,
            None,
            {
                "module": "fastapi.routing",
                "abs_path": "/usr/lib/python2.7/dist-packages/fastapi/routing.py",
                "in_app": False,
            },
        ],
        [
            {
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
            None,
            None,
            None,
            {
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
        ],
        [
            {
                "module": "main",
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
            None,
            None,
            None,
            {
                "module": "main",
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
        ],
        # include
        [
            {
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
            },
            ["fastapi"],
            None,
            None,
            {
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
                "in_app": False,  # because there is no module set
            },
        ],
        [
            {
                "module": "fastapi.routing",
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
            },
            ["fastapi"],
            None,
            None,
            {
                "module": "fastapi.routing",
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
                "in_app": True,
            },
        ],
        [
            {
                "module": "fastapi.routing",
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
                "in_app": False,
            },
            ["fastapi"],
            None,
            None,
            {
                "module": "fastapi.routing",
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
                "in_app": False,
            },
        ],
        [
            {
                "abs_path": "C:\\Users\\winuser\\AppData\\Roaming\\Python\\Python35\\site-packages\\fastapi\\routing.py",
            },
            ["fastapi"],
            None,
            None,
            {
                "abs_path": "C:\\Users\\winuser\\AppData\\Roaming\\Python\\Python35\\site-packages\\fastapi\\routing.py",
                "in_app": False,  # because there is no module set
            },
        ],
        [
            {
                "module": "fastapi.routing",
                "abs_path": "/usr/lib/python2.7/dist-packages/fastapi/routing.py",
            },
            ["fastapi"],
            None,
            None,
            {
                "module": "fastapi.routing",
                "abs_path": "/usr/lib/python2.7/dist-packages/fastapi/routing.py",
                "in_app": True,
            },
        ],
        [
            {
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
            ["fastapi"],
            None,
            None,
            {
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
        ],
        [
            {
                "module": "main",
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
            ["fastapi"],
            None,
            None,
            {
                "module": "main",
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
        ],
        # exclude
        [
            {
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
            },
            None,
            ["main"],
            None,
            {
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
                "in_app": False,
            },
        ],
        [
            {
                "module": "fastapi.routing",
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
            },
            None,
            ["main"],
            None,
            {
                "module": "fastapi.routing",
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
                "in_app": False,
            },
        ],
        [
            {
                "module": "fastapi.routing",
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
                "in_app": True,
            },
            None,
            ["main"],
            None,
            {
                "module": "fastapi.routing",
                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
                "in_app": True,
            },
        ],
        [
            {
                "abs_path": "C:\\Users\\winuser\\AppData\\Roaming\\Python\\Python35\\site-packages\\fastapi\\routing.py",
            },
            None,
            ["main"],
            None,
            {
                "abs_path": "C:\\Users\\winuser\\AppData\\Roaming\\Python\\Python35\\site-packages\\fastapi\\routing.py",
                "in_app": False,
            },
        ],
        [
            {
                "module": "fastapi.routing",
                "abs_path": "/usr/lib/python2.7/dist-packages/fastapi/routing.py",
            },
            None,
            ["main"],
            None,
            {
                "module": "fastapi.routing",
                "abs_path": "/usr/lib/python2.7/dist-packages/fastapi/routing.py",
                "in_app": False,
            },
        ],
        [
            {
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
            None,
            ["main"],
            None,
            {
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
        ],
        [
            {
                "module": "main",
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
            None,
            ["main"],
            None,
            {
                "module": "main",
                "abs_path": "/home/ubuntu/fastapi/main.py",
                "in_app": False,
            },
        ],
        [
            {
                "module": "fastapi.routing",
            },
            None,
            None,
            None,
            {
                "module": "fastapi.routing",
            },
        ],
        [
            {
                "module": "fastapi.routing",
            },
            ["fastapi"],
            None,
            None,
            {
                "module": "fastapi.routing",
                "in_app": True,
            },
        ],
        [
            {
                "module": "fastapi.routing",
            },
            None,
            ["fastapi"],
            None,
            {
                "module": "fastapi.routing",
                "in_app": False,
            },
        ],
        # with project_root set
        [
            {
                "module": "main",
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
            None,
            None,
            "/home/ubuntu/fastapi",
            {
                "module": "main",
                "abs_path": "/home/ubuntu/fastapi/main.py",
                "in_app": True,
            },
        ],
        [
            {
                "module": "main",
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
            ["main"],
            None,
            "/home/ubuntu/fastapi",
            {
                "module": "main",
                "abs_path": "/home/ubuntu/fastapi/main.py",
                "in_app": True,
            },
        ],
        [
            {
                "module": "main",
                "abs_path": "/home/ubuntu/fastapi/main.py",
            },
            None,
            ["main"],
            "/home/ubuntu/fastapi",
            {
                "module": "main",
                "abs_path": "/home/ubuntu/fastapi/main.py",
                "in_app": False,
            },
        ],
    ],
)
def test_set_in_app_in_frames(
    frame, in_app_include, in_app_exclude, project_root, resulting_frame
):
    new_frames = set_in_app_in_frames(
        [frame],
        in_app_include=in_app_include,
        in_app_exclude=in_app_exclude,
        project_root=project_root,
    )

    assert new_frames[0] == resulting_frame


def test_iter_stacktraces():
    assert set(
        iter_event_stacktraces(
            {
                "threads": {"values": [{"stacktrace": 1}]},
                "stacktrace": 2,
                "exception": {"values": [{"stacktrace": 3}]},
            }
        )
    ) == {1, 2, 3}


@pytest.mark.parametrize(
    ("original", "base64_encoded"),
    [
        # ascii only
        ("Dogs are great!", "RG9ncyBhcmUgZ3JlYXQh"),
        # emoji
        ("🐶", "8J+Qtg=="),
        # non-ascii
        (
            "Καλό κορίτσι, Μάιζεϊ!",
            "zprOsc67z4wgzrrOv8+Bzq/PhM+DzrksIM6czqzOuc62zrXPiiE=",
        ),
        # mix of ascii and non-ascii
        (
            "Of margir hundar! Ég geri ráð fyrir að ég þurfi stærra rúm.",
            "T2YgbWFyZ2lyIGh1bmRhciEgw4lnIGdlcmkgcsOhw7AgZnlyaXIgYcOwIMOpZyDDvnVyZmkgc3TDpnJyYSByw7ptLg==",
        ),
    ],
)
def test_successful_base64_conversion(original, base64_encoded):
    # all unicode characters should be handled correctly
    assert to_base64(original) == base64_encoded
    assert from_base64(base64_encoded) == original

    # "to" and "from" should be inverses
    assert from_base64(to_base64(original)) == original
    assert to_base64(from_base64(base64_encoded)) == base64_encoded


@pytest.mark.parametrize(
    "input",
    [
        1231,  # incorrect type
        True,  # incorrect type
        [],  # incorrect type
        {},  # incorrect type
        None,  # incorrect type
        "yayfordogs",  # wrong length
        "#dog",  # invalid ascii character
        "🐶",  # non-ascii character
    ],
)
def test_failed_base64_conversion(input):
    # conversion from base64 should fail if given input of the wrong type or
    # input which isn't a valid base64 string
    assert from_base64(input) is None

    # any string can be converted to base64, so only type errors will cause
    # failures
    if not isinstance(input, str):
        assert to_base64(input) is None


@pytest.mark.parametrize(
    "input,max_length,result",
    [
        [None, None, None],
        ["a" * 256, None, "a" * 256],
        [
            "a" * 257,
            256,
            AnnotatedValue(
                value="a" * 253 + "...",
                metadata={"len": 257, "rem": [["!limit", "x", 253, 256]]},
            ),
        ],
        ["éééé", None, "éééé"],
        [
            "éééé",
            5,
            AnnotatedValue(
                value="é...", metadata={"len": 8, "rem": [["!limit", "x", 2, 5]]}
            ),
        ],
    ],
)
def test_strip_string(input, max_length, result):
    assert strip_string(input, max_length) == result
sentry-python-2.18.0/tests/utils/test_transaction.py000066400000000000000000000025271471214654000227170ustar00rootroot00000000000000from functools import partial, partialmethod

from sentry_sdk.utils import transaction_from_function


class MyClass:
    def myfunc(self):
        pass


def myfunc():
    pass


@partial
def my_partial():
    pass


my_lambda = lambda: None

my_partial_lambda = partial(lambda: None)


def test_transaction_from_function():
    x = transaction_from_function
    assert x(MyClass) == "tests.utils.test_transaction.MyClass"
    assert x(MyClass.myfunc) == "tests.utils.test_transaction.MyClass.myfunc"
    assert x(myfunc) == "tests.utils.test_transaction.myfunc"
    assert x(None) is None
    assert x(42) is None
    assert x(lambda: None).endswith("<lambda>")
    assert x(my_lambda) == "tests.utils.test_transaction.<lambda>"
    assert (
        x(my_partial) == "partial(<function tests.utils.test_transaction.my_partial>)"
    )
    assert (
        x(my_partial_lambda)
        == "partial(>)"
    )


def test_transaction_from_function_partialmethod():
    x = transaction_from_function

    class MyPartialClass:
        @partialmethod
        def my_partial_method(self):
            pass

    assert (
        x(MyPartialClass.my_partial_method)
        == "partialmethod(.MyPartialClass.my_partial_method>)"
    )
sentry-python-2.18.0/tox.ini000066400000000000000000000562721471214654000160000ustar00rootroot00000000000000# Tox (http://codespeak.net/~hpk/tox/) is a tool for running tests
# in multiple virtualenvs. This configuration file will run the
# test suite on all supported python versions. To use it, "pip install tox"
# and then run "tox" from this directory.

[tox]
requires =
    # This version introduced using pip 24.1 which does not work with older Celery and HTTPX versions.
    virtualenv<20.26.3
envlist =
    # === Common ===
    {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common

    # === Gevent ===
    {py3.6,py3.8,py3.10,py3.11,py3.12}-gevent

    # === Integrations ===
    # General format is {pythonversion}-{integrationname}-v{frameworkversion}
    # 1 blank line between different integrations
    # Each framework version should only be mentioned once. I.e:
    #   {py3.7,py3.10}-django-v{3.2}
    #   {py3.10}-django-v{4.0}
    # instead of:
    #   {py3.7}-django-v{3.2}
    #   {py3.7,py3.10}-django-v{3.2,4.0}
    #
    # At a minimum, we should test against at least the lowest
    # and the latest supported version of a framework.

    # AIOHTTP
    {py3.7}-aiohttp-v{3.4}
    {py3.7,py3.9,py3.11}-aiohttp-v{3.8}
    {py3.8,py3.12,py3.13}-aiohttp-latest

    # Anthropic
    {py3.7,py3.11,py3.12}-anthropic-v{0.16,0.25}
    {py3.7,py3.11,py3.12}-anthropic-latest

    # Ariadne
    {py3.8,py3.11}-ariadne-v{0.20}
    {py3.8,py3.12,py3.13}-ariadne-latest

    # Arq
    {py3.7,py3.11}-arq-v{0.23}
    {py3.7,py3.12,py3.13}-arq-latest

    # Asgi
    {py3.7,py3.12,py3.13}-asgi

    # asyncpg
    {py3.7,py3.10}-asyncpg-v{0.23}
    {py3.8,py3.11,py3.12}-asyncpg-latest

    # AWS Lambda
    # The aws_lambda tests deploy to the real AWS and have their own
    # matrix of Python versions to run the test lambda function in.
    # see `lambda_runtime` fixture in tests/integrations/aws_lambda.py
    {py3.9}-aws_lambda

    # Beam
    {py3.7}-beam-v{2.12}
    {py3.8,py3.11}-beam-latest

    # Boto3
    {py3.6,py3.7}-boto3-v{1.12}
    {py3.7,py3.11,py3.12}-boto3-v{1.23}
    {py3.11,py3.12}-boto3-v{1.34}
    {py3.11,py3.12,py3.13}-boto3-latest

    # Bottle
    {py3.6,py3.9}-bottle-v{0.12}
    {py3.6,py3.12,py3.13}-bottle-latest

    # Celery
    {py3.6,py3.8}-celery-v{4}
    {py3.6,py3.8}-celery-v{5.0}
    {py3.7,py3.10}-celery-v{5.1,5.2}
    {py3.8,py3.11,py3.12}-celery-v{5.3,5.4}
    {py3.8,py3.12,py3.13}-celery-latest

    # Chalice
    {py3.6,py3.9}-chalice-v{1.16}
    {py3.8,py3.12,py3.13}-chalice-latest

    # Clickhouse Driver
    {py3.8,py3.11}-clickhouse_driver-v{0.2.0}
    {py3.8,py3.12,py3.13}-clickhouse_driver-latest

    # Cloud Resource Context
    {py3.6,py3.12,py3.13}-cloud_resource_context

    # Cohere
    {py3.9,py3.11,py3.12}-cohere-v5
    {py3.9,py3.11,py3.12}-cohere-latest

    # Django
    # - Django 1.x
    {py3.6,py3.7}-django-v{1.11}
    # - Django 2.x
    {py3.6,py3.7}-django-v{2.0}
    {py3.6,py3.9}-django-v{2.2}
    # - Django 3.x
    {py3.6,py3.9}-django-v{3.0}
    {py3.6,py3.9,py3.11}-django-v{3.2}
    # - Django 4.x
    {py3.8,py3.11,py3.12}-django-v{4.0,4.1,4.2}
    # - Django 5.x
    {py3.10,py3.11,py3.12}-django-v{5.0,5.1}
    {py3.10,py3.12,py3.13}-django-latest

    # dramatiq
    {py3.6,py3.9}-dramatiq-v{1.13}
    {py3.7,py3.10,py3.11}-dramatiq-v{1.15}
    {py3.8,py3.11,py3.12}-dramatiq-v{1.17}
    {py3.8,py3.11,py3.12}-dramatiq-latest

    # Falcon
    {py3.6,py3.7}-falcon-v{1,1.4,2}
    {py3.6,py3.11,py3.12}-falcon-v{3}
    {py3.8,py3.11,py3.12}-falcon-v{4}
    {py3.7,py3.11,py3.12}-falcon-latest

    # FastAPI
    {py3.7,py3.10}-fastapi-v{0.79}
    {py3.8,py3.12,py3.13}-fastapi-latest

    # Flask
    {py3.6,py3.8}-flask-v{1}
    {py3.8,py3.11,py3.12}-flask-v{2}
    {py3.10,py3.11,py3.12}-flask-v{3}
    {py3.10,py3.12,py3.13}-flask-latest

    # GCP
    {py3.7}-gcp

    # GQL
    {py3.7,py3.11}-gql-v{3.4}
    {py3.7,py3.12,py3.13}-gql-latest

    # Graphene
    {py3.7,py3.11}-graphene-v{3.3}
    {py3.7,py3.12,py3.13}-graphene-latest

    # gRPC
    {py3.7,py3.9}-grpc-v{1.39}
    {py3.7,py3.10}-grpc-v{1.49}
    {py3.7,py3.11}-grpc-v{1.59}
    {py3.8,py3.11,py3.12}-grpc-latest

    # HTTPX
    {py3.6,py3.9}-httpx-v{0.16,0.18}
    {py3.6,py3.10}-httpx-v{0.20,0.22}
    {py3.7,py3.11,py3.12}-httpx-v{0.23,0.24}
    {py3.9,py3.11,py3.12}-httpx-v{0.25,0.27}
    {py3.9,py3.12,py3.13}-httpx-latest

    # Huey
    {py3.6,py3.11,py3.12}-huey-v{2.0}
    {py3.6,py3.12,py3.13}-huey-latest

    # Huggingface Hub
    {py3.9,py3.12,py3.13}-huggingface_hub-v{0.22}
    {py3.9,py3.12,py3.13}-huggingface_hub-latest

    # Langchain
    {py3.9,py3.11,py3.12}-langchain-v0.1
    {py3.9,py3.11,py3.12}-langchain-latest
    {py3.9,py3.11,py3.12}-langchain-notiktoken

    # Litestar
    # litestar 2.0.0 is the earliest version that supports Python < 3.12
    {py3.8,py3.11}-litestar-v{2.0}
    # litestar 2.3.0 is the earliest version that supports Python 3.12
    {py3.12}-litestar-v{2.3}
    {py3.8,py3.11,py3.12}-litestar-v{2.5}
    {py3.8,py3.11,py3.12}-litestar-latest

    # Loguru
    {py3.6,py3.11,py3.12}-loguru-v{0.5}
    {py3.6,py3.12,py3.13}-loguru-latest

    # OpenAI
    {py3.9,py3.11,py3.12}-openai-v1
    {py3.9,py3.11,py3.12}-openai-latest
    {py3.9,py3.11,py3.12}-openai-notiktoken

    # OpenFeature
    {py3.8,py3.12,py3.13}-openfeature-v0.7
    {py3.8,py3.12,py3.13}-openfeature-latest

    # LaunchDarkly
    {py3.8,py3.12,py3.13}-launchdarkly-v9.8.0
    {py3.8,py3.12,py3.13}-launchdarkly-latest

    # OpenTelemetry (OTel)
    {py3.7,py3.9,py3.12,py3.13}-opentelemetry

    # OpenTelemetry Experimental (POTel)
    {py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-potel

    # pure_eval
    {py3.6,py3.12,py3.13}-pure_eval

    # PyMongo (Mongo DB)
    {py3.6}-pymongo-v{3.1}
    {py3.6,py3.9}-pymongo-v{3.12}
    {py3.6,py3.11}-pymongo-v{4.0}
    {py3.7,py3.11,py3.12}-pymongo-v{4.3,4.7}
    {py3.7,py3.12,py3.13}-pymongo-latest

    # Pyramid
    {py3.6,py3.11}-pyramid-v{1.6}
    {py3.6,py3.11,py3.12}-pyramid-v{1.10}
    {py3.6,py3.11,py3.12}-pyramid-v{2.0}
    {py3.6,py3.11,py3.12}-pyramid-latest

    # Quart
    {py3.7,py3.11}-quart-v{0.16}
    {py3.8,py3.11,py3.12}-quart-v{0.19}
    {py3.8,py3.12,py3.13}-quart-latest

    # Ray
    {py3.10,py3.11}-ray-v{2.34}
    {py3.10,py3.11}-ray-latest

    # Redis
    {py3.6,py3.8}-redis-v{3}
    {py3.7,py3.8,py3.11}-redis-v{4}
    {py3.7,py3.11,py3.12}-redis-v{5}
    {py3.7,py3.12,py3.13}-redis-latest

    # Redis Cluster
    {py3.6,py3.8}-redis_py_cluster_legacy-v{1,2}
    # no -latest, not developed anymore

    # Requests
    {py3.6,py3.8,py3.12,py3.13}-requests

    # RQ (Redis Queue)
    {py3.6}-rq-v{0.6}
    {py3.6,py3.9}-rq-v{0.13,1.0}
    {py3.6,py3.11}-rq-v{1.5,1.10}
    {py3.7,py3.11,py3.12}-rq-v{1.15,1.16}
    {py3.7,py3.12,py3.13}-rq-latest

    # Sanic
    {py3.6,py3.7}-sanic-v{0.8}
    {py3.6,py3.8}-sanic-v{20}
    {py3.7,py3.11}-sanic-v{22}
    {py3.7,py3.11}-sanic-v{23}
    {py3.8,py3.11,py3.12}-sanic-latest

    # Spark
    {py3.8,py3.10,py3.11}-spark-v{3.1,3.3,3.5}
    {py3.8,py3.10,py3.11,py3.12}-spark-latest

    # Starlette
    {py3.7,py3.10}-starlette-v{0.19}
    {py3.7,py3.11}-starlette-v{0.20,0.24,0.28}
    {py3.8,py3.11,py3.12}-starlette-v{0.32,0.36}
    {py3.8,py3.12,py3.13}-starlette-latest

    # Starlite
    {py3.8,py3.11}-starlite-v{1.48,1.51}
    # 1.51.14 is the last starlite version; the project continues as litestar

    # SQL Alchemy
    {py3.6,py3.9}-sqlalchemy-v{1.2,1.4}
    {py3.7,py3.11}-sqlalchemy-v{2.0}
    {py3.7,py3.12,py3.13}-sqlalchemy-latest

    # Strawberry
    {py3.8,py3.11}-strawberry-v{0.209}
    {py3.8,py3.11,py3.12}-strawberry-v{0.222}
    {py3.8,py3.12,py3.13}-strawberry-latest

    # Tornado
    {py3.8,py3.11,py3.12}-tornado-v{6.0}
    {py3.8,py3.11,py3.12}-tornado-v{6.2}
    {py3.8,py3.11,py3.12}-tornado-latest

    # Trytond
    {py3.6}-trytond-v{4}
    {py3.6,py3.8}-trytond-v{5}
    {py3.6,py3.11}-trytond-v{6}
    {py3.8,py3.11,py3.12}-trytond-v{7}
    {py3.8,py3.12,py3.13}-trytond-latest

[testenv]
deps =
    # if you change requirements-testing.txt and your change is not being reflected
    # in what's installed by tox (when running tox locally), try running tox
    # with the -r flag
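    # (a hypothetical example, env name taken from the envlist above:
    # `tox -r -e py3.12-common` recreates that env before running it)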
    -r requirements-testing.txt

    linters: -r requirements-linting.txt
    linters: werkzeug<2.3.0

    # === Common ===
    py3.8-common: hypothesis
    common: pytest-asyncio
    # See https://github.com/pytest-dev/pytest/issues/9621
    # and https://github.com/pytest-dev/pytest-forked/issues/67
    # for justification of the upper bound on pytest
    {py3.6,py3.7}-common: pytest<7.0.0
    {py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common: pytest

    # === Gevent ===
    {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-gevent: gevent>=22.10.0, <22.11.0
    {py3.12}-gevent: gevent
    # See https://github.com/pytest-dev/pytest/issues/9621
    # and https://github.com/pytest-dev/pytest-forked/issues/67
    # for justification of the upper bound on pytest
    {py3.6,py3.7}-gevent: pytest<7.0.0
    {py3.8,py3.9,py3.10,py3.11,py3.12}-gevent: pytest

    # === Integrations ===

    # AIOHTTP
    aiohttp-v3.4: aiohttp~=3.4.0
    aiohttp-v3.8: aiohttp~=3.8.0
    aiohttp-latest: aiohttp
    aiohttp: pytest-aiohttp
    aiohttp-v3.8: pytest-asyncio
    aiohttp-latest: pytest-asyncio

    # Anthropic
    anthropic: pytest-asyncio
    anthropic-v0.25: anthropic~=0.25.0
    anthropic-v0.16: anthropic~=0.16.0
    anthropic-latest: anthropic

    # Ariadne
    ariadne-v0.20: ariadne~=0.20.0
    ariadne-latest: ariadne
    ariadne: fastapi
    ariadne: flask
    ariadne: httpx

    # Arq
    arq-v0.23: arq~=0.23.0
    arq-v0.23: pydantic<2
    arq-latest: arq
    arq: fakeredis>=2.2.0,<2.8
    arq: pytest-asyncio
    arq: async-timeout

    # Asgi
    asgi: pytest-asyncio
    asgi: async-asgi-testclient

    # Asyncpg
    asyncpg-v0.23: asyncpg~=0.23.0
    asyncpg-latest: asyncpg
    asyncpg: pytest-asyncio

    # AWS Lambda
    aws_lambda: boto3

    # Beam
    beam-v2.12: apache-beam~=2.12.0
    beam-latest: apache-beam

    # Boto3
    boto3-v1.12: boto3~=1.12.0
    boto3-v1.23: boto3~=1.23.0
    boto3-v1.34: boto3~=1.34.0
    boto3-latest: boto3

    # Bottle
    bottle: Werkzeug<2.1.0
    bottle-v0.12: bottle~=0.12.0
    bottle-latest: bottle

    # Celery
    celery: redis
    celery-v4: Celery~=4.0
    celery-v5.0: Celery~=5.0.0
    celery-v5.1: Celery~=5.1.0
    celery-v5.2: Celery~=5.2.0
    celery-v5.3: Celery~=5.3.0
    celery-v5.4: Celery~=5.4.0
    celery-latest: Celery

    celery: newrelic
    {py3.7}-celery: importlib-metadata<5.0

    # Chalice
    chalice-v1.16: chalice~=1.16.0
    chalice-latest: chalice
    chalice: pytest-chalice==0.0.5

    {py3.7,py3.8}-chalice: botocore~=1.31

    # Clickhouse Driver
    clickhouse_driver-v0.2.0: clickhouse_driver~=0.2.0
    clickhouse_driver-latest: clickhouse_driver

    # Cohere
    cohere-v5: cohere~=5.3.3
    cohere-latest: cohere

    # Django
    django: psycopg2-binary
    django-v{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0
    django-v{2.0,2.2,3.0,3.2,4.0,4.1,4.2,5.0,5.1}: channels[daphne]
    django-v{1.11,2.0,2.2,3.0,3.2}: Werkzeug<2.1.0
    django-v{1.11,2.0,2.2,3.0}: pytest-django<4.0
    django-v{3.2,4.0,4.1,4.2,5.0,5.1}: pytest-django
    django-v{4.0,4.1,4.2,5.0,5.1}: djangorestframework
    django-v{4.0,4.1,4.2,5.0,5.1}: pytest-asyncio
    django-v{4.0,4.1,4.2,5.0,5.1}: Werkzeug
    django-latest: djangorestframework
    django-latest: pytest-asyncio
    django-latest: pytest-django
    django-latest: Werkzeug
    django-latest: channels[daphne]

    django-v1.11: Django~=1.11.0
    django-v2.0: Django~=2.0.0
    django-v2.2: Django~=2.2.0
    django-v3.0: Django~=3.0.0
    django-v3.2: Django~=3.2.0
    django-v4.0: Django~=4.0.0
    django-v4.1: Django~=4.1.0
    django-v4.2: Django~=4.2.0
    django-v5.0: Django~=5.0.0
    django-v5.1: Django==5.1rc1
    django-latest: Django

    # dramatiq
    dramatiq-v1.13: dramatiq>=1.13,<1.14
    dramatiq-v1.15: dramatiq>=1.15,<1.16
    dramatiq-v1.17: dramatiq>=1.17,<1.18
    dramatiq-latest: dramatiq

    # Falcon
    falcon-v1.4: falcon~=1.4.0
    falcon-v1: falcon~=1.0
    falcon-v2: falcon~=2.0
    falcon-v3: falcon~=3.0
    falcon-v4: falcon~=4.0
    falcon-latest: falcon

    # FastAPI
    fastapi: httpx
    # (this is a dependency of httpx)
    fastapi: anyio<4.0.0
    fastapi: pytest-asyncio
    fastapi: python-multipart
    fastapi: requests
    fastapi-v{0.79}: fastapi~=0.79.0
    fastapi-latest: fastapi

    # Flask
    flask: flask-login
    flask-v{1,2.0}: Werkzeug<2.1.0
    flask-v{1,2.0}: markupsafe<2.1.0
    flask-v{3}: Werkzeug
    flask-v1: Flask~=1.0
    flask-v2: Flask~=2.0
    flask-v3: Flask~=3.0
    flask-latest: Flask

    # GQL
    gql-v{3.4}: gql[all]~=3.4.0
    gql-latest: gql[all]

    # Graphene
    graphene: blinker
    graphene: fastapi
    graphene: flask
    graphene: httpx
    graphene-v{3.3}: graphene~=3.3.0
    graphene-latest: graphene

    # gRPC
    grpc: protobuf
    grpc: mypy-protobuf
    grpc: types-protobuf
    grpc: pytest-asyncio
    grpc-v1.39: grpcio~=1.39.0
    grpc-v1.49: grpcio~=1.49.1
    grpc-v1.59: grpcio~=1.59.0
    grpc-latest: grpcio

    # HTTPX
    httpx-v0.16: pytest-httpx==0.10.0
    httpx-v0.18: pytest-httpx==0.12.0
    httpx-v0.20: pytest-httpx==0.14.0
    httpx-v0.22: pytest-httpx==0.19.0
    httpx-v0.23: pytest-httpx==0.21.0
    httpx-v0.24: pytest-httpx==0.22.0
    httpx-v0.25: pytest-httpx==0.25.0
    httpx: pytest-httpx
    # anyio is a dep of httpx
    httpx: anyio<4.0.0
    httpx-v0.16: httpx~=0.16.0
    httpx-v0.18: httpx~=0.18.0
    httpx-v0.20: httpx~=0.20.0
    httpx-v0.22: httpx~=0.22.0
    httpx-v0.23: httpx~=0.23.0
    httpx-v0.24: httpx~=0.24.0
    httpx-v0.25: httpx~=0.25.0
    httpx-v0.27: httpx~=0.27.0
    httpx-latest: httpx

    # Huey
    huey-v2.0: huey~=2.0.0
    huey-latest: huey

    # Huggingface Hub
    huggingface_hub-v0.22: huggingface_hub~=0.22.2
    huggingface_hub-latest: huggingface_hub

    # Langchain
    langchain-v0.1: openai~=1.0.0
    langchain-v0.1: langchain~=0.1.11
    langchain-v0.1: tiktoken~=0.6.0
    langchain-latest: langchain
    langchain-latest: langchain-openai
    langchain-latest: openai>=1.6.1
    langchain-latest: tiktoken~=0.6.0
    langchain-notiktoken: langchain
    langchain-notiktoken: langchain-openai
    langchain-notiktoken: openai>=1.6.1

    # Litestar
    litestar: pytest-asyncio
    litestar: python-multipart
    litestar: requests
    litestar: cryptography
    litestar-v2.0: litestar~=2.0.0
    litestar-v2.3: litestar~=2.3.0
    litestar-v2.5: litestar~=2.5.0
    litestar-latest: litestar

    # Loguru
    loguru-v0.5: loguru~=0.5.0
    loguru-latest: loguru

    # OpenAI
    openai: pytest-asyncio
    openai-v1: openai~=1.0.0
    openai-v1: tiktoken~=0.6.0
    openai-latest: openai
    openai-latest: tiktoken~=0.6.0
    openai-notiktoken: openai

    # OpenFeature
    openfeature-v0.7: openfeature-sdk~=0.7.1
    openfeature-latest: openfeature-sdk

    # LaunchDarkly
    launchdarkly-v9.8.0: launchdarkly-server-sdk~=9.8.0
    launchdarkly-latest: launchdarkly-server-sdk

    # OpenTelemetry (OTel)
    opentelemetry: opentelemetry-distro

    # OpenTelemetry Experimental (POTel)
    potel: -e .[opentelemetry-experimental]

    # pure_eval
    pure_eval: pure_eval

    # PyMongo (MongoDB)
    pymongo: mockupdb
    pymongo-v3.1: pymongo~=3.1.0
    pymongo-v3.12: pymongo~=3.12.0
    pymongo-v4.0: pymongo~=4.0.0
    pymongo-v4.3: pymongo~=4.3.0
    pymongo-v4.7: pymongo~=4.7.0
    pymongo-latest: pymongo

    # Pyramid
    pyramid: Werkzeug<2.1.0
    pyramid-v1.6: pyramid~=1.6.0
    pyramid-v1.10: pyramid~=1.10.0
    pyramid-v2.0: pyramid~=2.0.0
    pyramid-latest: pyramid

    # Quart
    quart: quart-auth
    quart: pytest-asyncio
    quart-v0.16: blinker<1.6
    quart-v0.16: jinja2<3.1.0
    quart-v0.16: Werkzeug<2.1.0
    quart-v0.16: hypercorn<0.15.0
    quart-v0.16: quart~=0.16.0
    quart-v0.19: Werkzeug>=3.0.0
    quart-v0.19: quart~=0.19.0
    quart-latest: quart

    # Ray
    ray-v2.34: ray~=2.34.0
    ray-latest: ray

    # Redis
    redis: fakeredis!=1.7.4
    redis: pytest<8.0.0
    {py3.6,py3.7}-redis: fakeredis!=2.26.0  # https://github.com/cunla/fakeredis-py/issues/341
    {py3.7,py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-redis: pytest-asyncio
    redis-v3: redis~=3.0
    redis-v4: redis~=4.0
    redis-v5: redis~=5.0
    redis-latest: redis

    # Redis Cluster
    redis_py_cluster_legacy-v1: redis-py-cluster~=1.0
    redis_py_cluster_legacy-v2: redis-py-cluster~=2.0

    # Requests
    requests: requests>=2.0

    # RQ (Redis Queue)
    # https://github.com/jamesls/fakeredis/issues/245
    rq-v{0.6}: fakeredis<1.0
    rq-v{0.6}: redis<3.2.2
    rq-v{0.13,1.0,1.5,1.10}: fakeredis>=1.0,<1.7.4
    rq-v{1.15,1.16}: fakeredis
    {py3.6,py3.7}-rq-v{1.15,1.16}: fakeredis!=2.26.0  # https://github.com/cunla/fakeredis-py/issues/341
    rq-latest: fakeredis
    {py3.6,py3.7}-rq-latest: fakeredis!=2.26.0  # https://github.com/cunla/fakeredis-py/issues/341
    rq-v0.6: rq~=0.6.0
    rq-v0.13: rq~=0.13.0
    rq-v1.0: rq~=1.0.0
    rq-v1.5: rq~=1.5.0
    rq-v1.10: rq~=1.10.0
    rq-v1.15: rq~=1.15.0
    rq-v1.16: rq~=1.16.0
    rq-latest: rq

    # Sanic
    sanic: websockets<11.0
    sanic: aiohttp
    sanic-v{22,23}: sanic_testing
    sanic-latest: sanic_testing
    {py3.6}-sanic: aiocontextvars==0.2.1
    sanic-v0.8: sanic~=0.8.0
    sanic-v20: sanic~=20.0
    sanic-v22: sanic~=22.0
    sanic-v23: sanic~=23.0
    sanic-latest: sanic

    # Spark
    spark-v3.1: pyspark~=3.1.0
    spark-v3.3: pyspark~=3.3.0
    spark-v3.5: pyspark~=3.5.0
    spark-latest: pyspark

    # Starlette
    starlette: pytest-asyncio
    starlette: python-multipart
    starlette: requests
    starlette: httpx
    # (this is a dependency of httpx)
    starlette: anyio<4.0.0
    starlette: jinja2
    starlette-v0.19: starlette~=0.19.0
    starlette-v0.20: starlette~=0.20.0
    starlette-v0.24: starlette~=0.24.0
    starlette-v0.28: starlette~=0.28.0
    starlette-v0.32: starlette~=0.32.0
    starlette-v0.36: starlette~=0.36.0
    starlette-latest: starlette

    # Starlite
    starlite: pytest-asyncio
    starlite: python-multipart
    starlite: requests
    starlite: cryptography
    starlite: pydantic<2.0.0
    starlite-v{1.48}: starlite~=1.48.0
    starlite-v{1.51}: starlite~=1.51.0

    # SQLAlchemy
    sqlalchemy-v1.2: sqlalchemy~=1.2.0
    sqlalchemy-v1.4: sqlalchemy~=1.4.0
    sqlalchemy-v2.0: sqlalchemy~=2.0.0
    sqlalchemy-latest: sqlalchemy

    # Strawberry
    strawberry: fastapi
    strawberry: flask
    strawberry: httpx
    strawberry-v0.209: strawberry-graphql[fastapi,flask]~=0.209.0
    strawberry-v0.222: strawberry-graphql[fastapi,flask]~=0.222.0
    strawberry-latest: strawberry-graphql[fastapi,flask]

    # Tornado
    # Tornado <6.4.1 is incompatible with Pytest ≥8.2
    # See https://github.com/tornadoweb/tornado/pull/3382.
    tornado-{v6.0,v6.2}: pytest<8.2
    tornado-v6.0: tornado~=6.0.0
    tornado-v6.2: tornado~=6.2.0
    tornado-latest: tornado

    # Trytond
    trytond: werkzeug
    trytond-v4: werkzeug<1.0
    trytond-v4: trytond~=4.0
    trytond-v5: trytond~=5.0
    trytond-v6: trytond~=6.0
    trytond-v7: trytond~=7.0
    trytond-latest: trytond

setenv =
    PYTHONDONTWRITEBYTECODE=1
    OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES
    COVERAGE_FILE=.coverage-sentry-{envname}
    py3.6: COVERAGE_RCFILE=.coveragerc36

    django: DJANGO_SETTINGS_MODULE=tests.integrations.django.myapp.settings

    common: TESTPATH=tests
    gevent: TESTPATH=tests
    aiohttp: TESTPATH=tests/integrations/aiohttp
    anthropic: TESTPATH=tests/integrations/anthropic
    ariadne: TESTPATH=tests/integrations/ariadne
    arq: TESTPATH=tests/integrations/arq
    asgi: TESTPATH=tests/integrations/asgi
    asyncpg: TESTPATH=tests/integrations/asyncpg
    aws_lambda: TESTPATH=tests/integrations/aws_lambda
    beam: TESTPATH=tests/integrations/beam
    boto3: TESTPATH=tests/integrations/boto3
    bottle: TESTPATH=tests/integrations/bottle
    celery: TESTPATH=tests/integrations/celery
    chalice: TESTPATH=tests/integrations/chalice
    clickhouse_driver: TESTPATH=tests/integrations/clickhouse_driver
    cohere: TESTPATH=tests/integrations/cohere
    cloud_resource_context: TESTPATH=tests/integrations/cloud_resource_context
    django: TESTPATH=tests/integrations/django
    dramatiq: TESTPATH=tests/integrations/dramatiq
    falcon: TESTPATH=tests/integrations/falcon
    fastapi: TESTPATH=tests/integrations/fastapi
    flask: TESTPATH=tests/integrations/flask
    gcp: TESTPATH=tests/integrations/gcp
    gql: TESTPATH=tests/integrations/gql
    graphene: TESTPATH=tests/integrations/graphene
    grpc: TESTPATH=tests/integrations/grpc
    httpx: TESTPATH=tests/integrations/httpx
    huey: TESTPATH=tests/integrations/huey
    huggingface_hub: TESTPATH=tests/integrations/huggingface_hub
    langchain: TESTPATH=tests/integrations/langchain
    launchdarkly: TESTPATH=tests/integrations/launchdarkly
    litestar: TESTPATH=tests/integrations/litestar
    loguru: TESTPATH=tests/integrations/loguru
    openai: TESTPATH=tests/integrations/openai
    openfeature: TESTPATH=tests/integrations/openfeature
    opentelemetry: TESTPATH=tests/integrations/opentelemetry
    potel: TESTPATH=tests/integrations/opentelemetry
    pure_eval: TESTPATH=tests/integrations/pure_eval
    pymongo: TESTPATH=tests/integrations/pymongo
    pyramid: TESTPATH=tests/integrations/pyramid
    quart: TESTPATH=tests/integrations/quart
    ray: TESTPATH=tests/integrations/ray
    redis: TESTPATH=tests/integrations/redis
    redis_py_cluster_legacy: TESTPATH=tests/integrations/redis_py_cluster_legacy
    requests: TESTPATH=tests/integrations/requests
    rq: TESTPATH=tests/integrations/rq
    sanic: TESTPATH=tests/integrations/sanic
    spark: TESTPATH=tests/integrations/spark
    starlette: TESTPATH=tests/integrations/starlette
    starlite: TESTPATH=tests/integrations/starlite
    sqlalchemy: TESTPATH=tests/integrations/sqlalchemy
    strawberry: TESTPATH=tests/integrations/strawberry
    tornado: TESTPATH=tests/integrations/tornado
    trytond: TESTPATH=tests/integrations/trytond
    socket: TESTPATH=tests/integrations/socket

passenv =
    SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID
    SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY
    SENTRY_PYTHON_TEST_POSTGRES_HOST
    SENTRY_PYTHON_TEST_POSTGRES_USER
    SENTRY_PYTHON_TEST_POSTGRES_PASSWORD
    SENTRY_PYTHON_TEST_POSTGRES_NAME

usedevelop = True

extras =
    bottle: bottle
    falcon: falcon
    flask: flask
    pymongo: pymongo

basepython =
    py3.6: python3.6
    py3.7: python3.7
    py3.8: python3.8
    py3.9: python3.9
    py3.10: python3.10
    py3.11: python3.11
    py3.12: python3.12
    py3.13: python3.13

    # Python version is pinned here because flake8 actually behaves differently
    # depending on which version is used. You can patch this out to point to
    # some random Python 3 binary, but then you get guaranteed mismatches with
    # CI. Other tools such as mypy and black have options that pin the Python
    # version.
    linters: python3.12

commands =
    {py3.7,py3.8}-boto3: pip install urllib3<2.0.0

    ; https://github.com/pallets/flask/issues/4455
    {py3.7,py3.8,py3.9,py3.10,py3.11}-flask-v{1}: pip install "itsdangerous>=0.24,<2.0" "markupsafe<2.0.0" "jinja2<3.1.1"

    ; Running `pytest` as an executable suffers from an import error
    ; when loading tests in some scenarios. In particular, Django fails to
    ; load the settings from the test module.
    python -m pytest {env:TESTPATH} -o junit_suite_name={envname} {posargs}

[testenv:linters]
commands =
    flake8 tests sentry_sdk
    black --check tests sentry_sdk
    mypy sentry_sdk